// TODO: cleanup includes
#include "cache.h"
#include "clang_complete.h"
#include "file_consumer.h"
#include "include_complete.h"
#include "indexer.h"
#include "ipc_manager.h"
#include "language_server_api.h"
#include "lex_utils.h"
#include "match.h"
#include "options.h"
#include "platform.h"
#include "project.h"
#include "query.h"
#include "query_utils.h"
#include "serializer.h"
#include "standard_includes.h"
#include "test.h"
#include "threaded_queue.h"
#include "timer.h"
#include "work_thread.h"
#include "working_files.h"

#include <doctest/doctest.h>
#include <rapidjson/istreamwrapper.h>
#include <rapidjson/ostreamwrapper.h>
#include <loguru.hpp>

#include <cassert>
#include <climits>
#include <fstream>
#include <functional>
#include <future>
#include <iostream>
#include <iterator>
#include <memory>
#include <mutex>
#include <sstream>
#include <string>
#include <thread>
#include <unordered_map>
#include <unordered_set>
#include <vector>

// TODO: provide a feature like 'https://github.com/goldsborough/clang-expand',
// ie, a fully linear view of a function with inline function calls expanded.
// We can probably use vscode decorators to achieve it.

// TODO: implement ThreadPool type which monitors CPU usage / number of work
// items per second completed and scales up/down number of running threads.

namespace {

std::vector<std::string> kEmptyArgs;

// Expected client version. We show an error if this doesn't match.
const int kExpectedClientVersion = 3;

// Cached completion information, so we can give fast completion results when
// the user erases a character. vscode will resend the completion request if
// that happens.
struct CodeCompleteCache {
  // NOTE: Make sure to access these variables under |WithLock|.
  optional<std::string> cached_path_;
  optional<lsPosition> cached_completion_position_;
  NonElidedVector<lsCompletionItem> cached_results_;

  std::mutex mutex_;

  void WithLock(std::function<void()> action) {
    std::lock_guard<std::mutex> lock(mutex_);
    action();
  }

  bool IsCacheValid(lsTextDocumentPositionParams position) {
    std::lock_guard<std::mutex> lock(mutex_);
    return cached_path_ == position.textDocument.uri.GetPath() &&
           cached_completion_position_ == position.position;
  }
};

// This function returns true if e2e timing should be displayed for the given
// IpcId.
bool ShouldDisplayIpcTiming(IpcId id) {
  switch (id) {
    case IpcId::TextDocumentPublishDiagnostics:
    case IpcId::CqueryPublishInactiveRegions:
    case IpcId::Cout:
      return false;
    default:
      return true;
  }
}

void PushBack(NonElidedVector<lsLocation>* result,
              optional<lsLocation> location) {
  if (location)
    result->push_back(*location);
}

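// Looks up the indexed QueryFile for |absolute_path|. On success stores the
// file in |out_query_file| (and optionally its id in |out_file_id|) and
// returns true; otherwise logs the failure, replies with an error if |id| is
// set, and returns false.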
bool FindFileOrFail(QueryDatabase* db,
                    optional<lsRequestId> id,
                    const std::string& absolute_path,
                    QueryFile** out_query_file,
                    QueryFileId* out_file_id = nullptr) {
  *out_query_file = nullptr;

  auto it = db->usr_to_file.find(LowerPathIfCaseInsensitive(absolute_path));
  if (it != db->usr_to_file.end()) {
    QueryFile& file = db->files[it->second.id];
    if (file.def) {
      *out_query_file = &file;
      if (out_file_id)
        *out_file_id = QueryFileId(it->second.id);
      return true;
    }
  }

  if (out_file_id)
    *out_file_id = QueryFileId((size_t)-1);

  LOG_S(INFO) << "Unable to find file \"" << absolute_path << "\"";

  if (id) {
    Out_Error out;
    out.id = *id;
    out.error.code = lsErrorCodes::InternalError;
    out.error.message = "Unable to find file " + absolute_path;
    IpcManager::instance()->SendOutMessageToClient(IpcId::Cout, out);
  }

  return false;
}

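// Publishes the preprocessor-skipped (inactive) regions of |working_file| to
// the client.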
void EmitInactiveLines(WorkingFile* working_file,
                       const std::vector<Range>& inactive_regions) {
  Out_CquerySetInactiveRegion out;
  out.params.uri = lsDocumentUri::FromPath(working_file->filename);
  for (Range skipped : inactive_regions) {
    optional<lsRange> ls_skipped = GetLsRange(working_file, skipped);
    if (ls_skipped)
      out.params.inactiveRegions.push_back(*ls_skipped);
  }
  IpcManager::instance()->SendOutMessageToClient(
      IpcId::CqueryPublishInactiveRegions, out);
}

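// Publishes semantic highlighting information (types, functions, variables)
// for every symbol in |file| to the client.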
void EmitSemanticHighlighting(QueryDatabase* db,
                              WorkingFile* working_file,
                              QueryFile* file) {
  auto map_symbol_kind_to_symbol_type = [](SymbolKind kind) {
    switch (kind) {
      case SymbolKind::Type:
        return Out_CqueryPublishSemanticHighlighting::SymbolType::Type;
      case SymbolKind::Func:
        return Out_CqueryPublishSemanticHighlighting::SymbolType::Function;
      case SymbolKind::Var:
        return Out_CqueryPublishSemanticHighlighting::SymbolType::Variable;
      default:
        assert(false);
        return Out_CqueryPublishSemanticHighlighting::SymbolType::Variable;
    }
  };

  // Group symbols together.
  std::unordered_map<SymbolIdx, Out_CqueryPublishSemanticHighlighting::Symbol>
      grouped_symbols;
  for (SymbolRef sym : file->def->all_symbols) {
    bool is_type_member = false;
    switch (sym.idx.kind) {
      case SymbolKind::Func: {
        QueryFunc* func = &db->funcs[sym.idx.idx];
        if (!func->def)
          continue;  // applies to for loop
        is_type_member = func->def->declaring_type.has_value();
        break;
      }
      case SymbolKind::Var: {
        QueryVar* var = &db->vars[sym.idx.idx];
        if (!var->def)
          continue;  // applies to for loop
        if (!var->def->is_local && !var->def->declaring_type)
          continue;  // applies to for loop
        is_type_member = var->def->declaring_type.has_value();
        break;
      }
      default:
        continue;  // applies to for loop
    }

    optional<lsRange> loc = GetLsRange(working_file, sym.loc.range);
    if (loc) {
      auto it = grouped_symbols.find(sym.idx);
      if (it != grouped_symbols.end()) {
        it->second.ranges.push_back(*loc);
      } else {
        Out_CqueryPublishSemanticHighlighting::Symbol symbol;
        symbol.type = map_symbol_kind_to_symbol_type(sym.idx.kind);
        symbol.is_type_member = is_type_member;
        symbol.ranges.push_back(*loc);
        grouped_symbols[sym.idx] = symbol;
      }
    }
  }

  // Publish.
  Out_CqueryPublishSemanticHighlighting out;
  out.params.uri = lsDocumentUri::FromPath(working_file->filename);
  for (auto& entry : grouped_symbols)
    out.params.symbols.push_back(entry.second);
  IpcManager::instance()->SendOutMessageToClient(
      IpcId::CqueryPublishSemanticHighlighting, out);
}

optional<int> FindIncludeLine(const std::vector<std::string>& lines,
                              const std::string& full_include_line) {
  //
  // This returns the line number at which |full_include_line| should be
  // inserted. For example,
  //
  //   #include <a>  // 0
  //   #include <c>  // 1
  //
  // Given #include <b>, this will return '1', which means that the
  // #include <b> text should be inserted at the start of line 1. Inserting
  // at the start of a line allows insertion at both the top and bottom of the
  // document.
  //
  // If the include line is already in the document this returns nullopt.
  //

  optional<int> last_include_line;
  optional<int> best_include_line;

  //  1 => include line is gt content (ie, it should go after)
  // -1 => include line is lt content (ie, it should go before)
  int last_line_compare = 1;

  for (int line = 0; line < (int)lines.size(); ++line) {
    if (!StartsWith(lines[line], "#include")) {
      last_line_compare = 1;
      continue;
    }

    last_include_line = line;

    int current_line_compare = full_include_line.compare(lines[line]);
    if (current_line_compare == 0)
      return nullopt;

    if (last_line_compare == 1 && current_line_compare == -1)
      best_include_line = line;
    last_line_compare = current_line_compare;
  }

  if (best_include_line)
    return *best_include_line;
  // If |best_include_line| didn't match that means we likely didn't find an
  // include which was lt the new one, so put it at the end of the last include
  // list.
  if (last_include_line)
    return *last_include_line + 1;
  // No includes, use top of document.
  return 0;
}

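// Tries to find the file that holds the implementations for symbols declared
// in |file|: first via an out-of-file definition in the outline, then by
// scanning the project for a non-header file with the same base name.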
optional<QueryFileId> GetImplementationFile(QueryDatabase* db,
                                            QueryFileId file_id,
                                            QueryFile* file) {
  for (SymbolRef sym : file->def->outline) {
    switch (sym.idx.kind) {
      case SymbolKind::Func: {
        QueryFunc& func = db->funcs[sym.idx.idx];
        // Note: we ignore the definition if it is in the same file (ie,
        // possibly a header).
        if (func.def && func.def->definition_extent &&
            func.def->definition_extent->path != file_id) {
          return func.def->definition_extent->path;
        }
        break;
      }
      case SymbolKind::Var: {
        QueryVar& var = db->vars[sym.idx.idx];
        // Note: we ignore the definition if it is in the same file (ie,
        // possibly a header).
        if (var.def && var.def->definition_extent &&
            var.def->definition_extent->path != file_id) {
          return db->vars[sym.idx.idx].def->definition_extent->path;
        }
        break;
      }
      default:
        break;
    }
  }

  // No associated definition, scan the project for a file in the same
  // directory with the same base-name.
  std::string original_path = LowerPathIfCaseInsensitive(file->def->path);
  std::string target_path = original_path;
  size_t last = target_path.find_last_of('.');
  if (last != std::string::npos) {
    target_path = target_path.substr(0, last);
  }

  LOG_S(INFO) << "!! Looking for impl file that starts with " << target_path;

  for (auto& entry : db->usr_to_file) {
    Usr path = entry.first;

    // Do not consider header files for implementation files.
    // TODO: make file extensions configurable.
    if (EndsWith(path, ".h") || EndsWith(path, ".hpp"))
      continue;

    if (StartsWith(path, target_path) && path != original_path) {
      return entry.second;
    }
  }

  return nullopt;
}

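// Ensures |impl_uri| and |impl_file_id| point at an implementation file for
// |file_id|, falling back to |file_id| itself if no better candidate exists.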
void EnsureImplFile(QueryDatabase* db,
                    QueryFileId file_id,
                    optional<lsDocumentUri>& impl_uri,
                    optional<QueryFileId>& impl_file_id) {
  if (!impl_uri.has_value()) {
    QueryFile& file = db->files[file_id.id];
    assert(file.def);

    impl_file_id = GetImplementationFile(db, file_id, &file);
    if (!impl_file_id.has_value())
      impl_file_id = file_id;

    QueryFile& impl_file = db->files[impl_file_id->id];
    if (impl_file.def)
      impl_uri = lsDocumentUri::FromPath(impl_file.def->path);
    else
      impl_uri = lsDocumentUri::FromPath(file.def->path);
  }
}

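// Builds a text edit that inserts a stub definition for |func| into the
// implementation file, placing it near the definitions of neighboring
// declarations when possible.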
optional<lsTextEdit> BuildAutoImplementForFunction(QueryDatabase* db,
                                                   WorkingFiles* working_files,
                                                   WorkingFile* working_file,
                                                   int default_line,
                                                   QueryFileId decl_file_id,
                                                   QueryFileId impl_file_id,
                                                   QueryFunc& func) {
  assert(func.def);
  for (const QueryLocation& decl : func.declarations) {
    if (decl.path != decl_file_id)
      continue;

    optional<lsRange> ls_decl = GetLsRange(working_file, decl.range);
    if (!ls_decl)
      continue;

    optional<std::string> type_name;
    optional<lsPosition> same_file_insert_end;
    if (func.def->declaring_type) {
      QueryType& declaring_type = db->types[func.def->declaring_type->id];
      if (declaring_type.def) {
        type_name = declaring_type.def->short_name;
        optional<lsRange> ls_type_def_extent = GetLsRange(
            working_file, declaring_type.def->definition_extent->range);
        if (ls_type_def_extent) {
          same_file_insert_end = ls_type_def_extent->end;
          same_file_insert_end->character += 1;  // move past semicolon.
        }
      }
    }

    std::string insert_text;
    int newlines_after_name = 0;
    LexFunctionDeclaration(working_file->buffer_content, ls_decl->start,
                           type_name, &insert_text, &newlines_after_name);

    if (!same_file_insert_end) {
      same_file_insert_end = ls_decl->end;
      same_file_insert_end->line += newlines_after_name;
      same_file_insert_end->character = 1000;
    }

    lsTextEdit edit;

    if (decl_file_id == impl_file_id) {
      edit.range.start = *same_file_insert_end;
      edit.range.end = *same_file_insert_end;
      edit.newText = "\n\n" + insert_text;
    } else {
      lsPosition best_pos;
      best_pos.line = default_line;
      int best_dist = INT_MAX;

      QueryFile& file = db->files[impl_file_id.id];
      assert(file.def);
      for (SymbolRef sym : file.def->outline) {
        switch (sym.idx.kind) {
          case SymbolKind::Func: {
            QueryFunc& sym_func = db->funcs[sym.idx.idx];
            if (!sym_func.def || !sym_func.def->definition_extent)
              break;

            for (QueryLocation& func_decl : sym_func.declarations) {
              if (func_decl.path == decl_file_id) {
                int dist = func_decl.range.start.line - decl.range.start.line;
                if (abs(dist) < abs(best_dist)) {
                  optional<lsLocation> def_loc = GetLsLocation(
                      db, working_files, *sym_func.def->definition_extent);
                  if (!def_loc)
                    continue;

                  best_dist = dist;

                  if (dist > 0)
                    best_pos = def_loc->range.start;
                  else
                    best_pos = def_loc->range.end;
                }
              }
            }

            break;
          }
          case SymbolKind::Var: {
            // TODO: handle vars.
            break;
          }
          case SymbolKind::Invalid:
          case SymbolKind::File:
          case SymbolKind::Type:
            LOG_S(WARNING) << "Unexpected SymbolKind "
                           << static_cast<int>(sym.idx.kind);
            break;
        }
      }

      edit.range.start = best_pos;
      edit.range.end = best_pos;
      if (best_dist < 0)
        edit.newText = "\n\n" + insert_text;
      else
        edit.newText = insert_text + "\n\n";
    }

    return edit;
  }

  return nullopt;
}

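// Publishes |diagnostics| for |path| to the client and caches them on the
// working file so fixits can be offered later.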
void EmitDiagnostics(WorkingFiles* working_files,
                     std::string path,
                     NonElidedVector<lsDiagnostic> diagnostics) {
  // Emit diagnostics.
  Out_TextDocumentPublishDiagnostics diagnostic_response;
  diagnostic_response.params.uri = lsDocumentUri::FromPath(path);
  diagnostic_response.params.diagnostics = diagnostics;
  IpcManager::instance()->SendOutMessageToClient(
      IpcId::TextDocumentPublishDiagnostics, diagnostic_response);

  // Cache diagnostics so we can show fixits.
  working_files->DoActionOnFile(path, [&](WorkingFile* working_file) {
    if (working_file)
      working_file->diagnostics_ = diagnostics;
  });
}

// Pre-filters completion responses before sending to vscode. This results in a
// significantly snappier completion experience as vscode is easily overloaded
// when given 1000+ completion items.
void FilterCompletionResponse(Out_TextDocumentComplete* complete_response,
                              const std::string& complete_text) {
  // Used to inject more completions.
#if false
  const size_t kNumIterations = 250;
  size_t size = complete_response->result.items.size();
  complete_response->result.items.reserve(size * (kNumIterations + 1));
  for (size_t iteration = 0; iteration < kNumIterations; ++iteration) {
    for (size_t i = 0; i < size; ++i) {
      auto item = complete_response->result.items[i];
      item.label += "#" + std::to_string(iteration);
      complete_response->result.items.push_back(item);
    }
  }
#endif

  const size_t kMaxResultSize = 100u;
  if (complete_response->result.items.size() > kMaxResultSize) {
    if (complete_text.empty()) {
      complete_response->result.items.resize(kMaxResultSize);
    } else {
      NonElidedVector<lsCompletionItem> filtered_result;
      filtered_result.reserve(kMaxResultSize);

      std::unordered_set<std::string> inserted;
      inserted.reserve(kMaxResultSize);

      // Find literal matches first.
      for (const lsCompletionItem& item : complete_response->result.items) {
        if (item.label.find(complete_text) != std::string::npos) {
          // Don't insert the same completion entry.
          if (!inserted.insert(item.InsertedContent()).second)
            continue;

          filtered_result.push_back(item);
          if (filtered_result.size() >= kMaxResultSize)
            break;
        }
      }

      // Find fuzzy matches if we haven't found all of the literal matches.
      if (filtered_result.size() < kMaxResultSize) {
        for (const lsCompletionItem& item : complete_response->result.items) {
          if (SubstringMatch(complete_text, item.label)) {
            // Don't insert the same completion entry.
            if (!inserted.insert(item.InsertedContent()).second)
              continue;

            filtered_result.push_back(item);
            if (filtered_result.size() >= kMaxResultSize)
              break;
          }
        }
      }

      complete_response->result.items = filtered_result;
    }

    // Assuming the client does not support out-of-order completion (ie, ao
    // matches against oa), our filtering is guaranteed to contain any
    // potential matches, so the completion is only incomplete if we have the
    // max number of emitted matches.
    if (complete_response->result.items.size() >= kMaxResultSize) {
      LOG_S(INFO) << "Marking completion results as incomplete";
      complete_response->result.isIncomplete = true;
    }
  }
}

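// A request to index |path| with the given compilation arguments.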
struct Index_Request {
  std::string path;
  // TODO: make |args| a string that is parsed lazily.
  std::vector<std::string> args;
  bool is_interactive;
  optional<std::string> contents;  // Preloaded contents. Useful for tests.

  Index_Request(const std::string& path,
                const std::vector<std::string>& args,
                bool is_interactive,
                optional<std::string> contents)
      : path(path),
        args(args),
        is_interactive(is_interactive),
        contents(contents) {}
};

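// A freshly parsed index (and, once loaded, the previously cached one)
// waiting for id mapping on the querydb side.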
struct Index_DoIdMap {
  std::unique_ptr<IndexFile> current;
  std::unique_ptr<IndexFile> previous;

  PerformanceImportFile perf;
  bool is_interactive = false;
  bool write_to_disk = false;
  bool load_previous = false;

  Index_DoIdMap(std::unique_ptr<IndexFile> current,
                PerformanceImportFile perf,
                bool is_interactive,
                bool write_to_disk)
      : current(std::move(current)),
        perf(perf),
        is_interactive(is_interactive),
        write_to_disk(write_to_disk) {
    assert(this->current);
  }
};

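// The current (and optionally previous) index along with their IdMaps, ready
// for delta computation.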
struct Index_OnIdMapped {
  struct File {
    std::unique_ptr<IndexFile> file;
    std::unique_ptr<IdMap> ids;

    File(std::unique_ptr<IndexFile> file, std::unique_ptr<IdMap> ids)
        : file(std::move(file)), ids(std::move(ids)) {}
  };

  std::unique_ptr<File> previous;
  std::unique_ptr<File> current;

  PerformanceImportFile perf;
  bool is_interactive;
  bool write_to_disk;

  Index_OnIdMapped(PerformanceImportFile perf,
                   bool is_interactive,
                   bool write_to_disk)
      : perf(perf),
        is_interactive(is_interactive),
        write_to_disk(write_to_disk) {}
};

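// A computed IndexUpdate along with its import performance stats.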
struct Index_OnIndexed {
  IndexUpdate update;
  PerformanceImportFile perf;

  Index_OnIndexed(IndexUpdate& update, PerformanceImportFile perf)
      : update(update), perf(perf) {}
};

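// Owns the threaded queues that pass work between the querydb thread and the
// indexer threads.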
struct QueueManager {
  using Index_RequestQueue = ThreadedQueue<Index_Request>;
  using Index_DoIdMapQueue = ThreadedQueue<Index_DoIdMap>;
  using Index_OnIdMappedQueue = ThreadedQueue<Index_OnIdMapped>;
  using Index_OnIndexedQueue = ThreadedQueue<Index_OnIndexed>;

  Index_RequestQueue index_request;
  Index_DoIdMapQueue do_id_map;
  Index_DoIdMapQueue load_previous_index;
  Index_OnIdMappedQueue on_id_mapped;
  Index_OnIndexedQueue on_indexed;

  QueueManager(MultiQueueWaiter* waiter)
      : index_request(waiter),
        do_id_map(waiter),
        load_previous_index(waiter),
        on_id_mapped(waiter),
        on_indexed(waiter) {}

  bool HasWork() {
    return !index_request.IsEmpty() || !do_id_map.IsEmpty() ||
           !load_previous_index.IsEmpty() || !on_id_mapped.IsEmpty() ||
           !on_indexed.IsEmpty();
  }
};

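// Registers all IPC message types handled by the server with the
// MessageRegistry.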
void RegisterMessageTypes() {
  MessageRegistry::instance()->Register<Ipc_CancelRequest>();
  MessageRegistry::instance()->Register<Ipc_InitializeRequest>();
  MessageRegistry::instance()->Register<Ipc_InitializedNotification>();
  MessageRegistry::instance()->Register<Ipc_Exit>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDidOpen>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDidChange>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDidClose>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDidSave>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentRename>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentComplete>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentSignatureHelp>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDefinition>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDocumentHighlight>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentHover>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentReferences>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDocumentSymbol>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentDocumentLink>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentCodeAction>();
  MessageRegistry::instance()->Register<Ipc_TextDocumentCodeLens>();
  MessageRegistry::instance()->Register<Ipc_CodeLensResolve>();
  MessageRegistry::instance()->Register<Ipc_WorkspaceSymbol>();
  MessageRegistry::instance()->Register<Ipc_CqueryFreshenIndex>();
  MessageRegistry::instance()->Register<Ipc_CqueryTypeHierarchyTree>();
  MessageRegistry::instance()->Register<Ipc_CqueryCallTreeInitial>();
  MessageRegistry::instance()->Register<Ipc_CqueryCallTreeExpand>();
  MessageRegistry::instance()->Register<Ipc_CqueryVars>();
  MessageRegistry::instance()->Register<Ipc_CqueryCallers>();
  MessageRegistry::instance()->Register<Ipc_CqueryBase>();
  MessageRegistry::instance()->Register<Ipc_CqueryDerived>();
  MessageRegistry::instance()->Register<Ipc_CqueryIndexFile>();
  MessageRegistry::instance()->Register<Ipc_CqueryQueryDbWaitForIdleIndexer>();
  MessageRegistry::instance()->Register<Ipc_CqueryExitWhenIdle>();
}

// Manages files inside of the indexing pipeline so we don't have the same file
// being imported multiple times.
//
// NOTE: This is not thread safe and should only be used on the querydb thread.
struct ImportManager {
  // Try to mark the given dependency as imported. A dependency can only ever be
  // imported once.
  bool TryMarkDependencyImported(const std::string& path) {
    std::lock_guard<std::mutex> lock(depdency_mutex_);
    return depdency_imported_.insert(path).second;
  }

  // Try to import the given file into querydb. We should only ever be
  // importing a file into querydb once per file. Returns true if the file
  // can be imported.
  bool StartQueryDbImport(const std::string& path) {
    return querydb_processing_.insert(path).second;
  }

  // The file has been fully imported and can be imported again later on.
  void DoneQueryDbImport(const std::string& path) {
    querydb_processing_.erase(path);
  }

  // Returns true if there are any files currently being imported.
  bool HasActiveQuerydbImports() { return !querydb_processing_.empty(); }

  std::unordered_set<std::string> querydb_processing_;

  // TODO: use std::shared_mutex so we can have multiple readers.
  std::mutex depdency_mutex_;
  std::unordered_set<std::string> depdency_imported_;
};

// Manages loading caches from file paths for the indexer process.
struct CacheLoader {
  explicit CacheLoader(Config* config) : config_(config) {}

  IndexFile* TryLoad(const std::string& path) {
    auto it = caches.find(path);
    if (it != caches.end())
      return it->second.get();

    std::unique_ptr<IndexFile> cache = LoadCachedIndex(config_, path);
    if (!cache)
      return nullptr;

    caches[path] = std::move(cache);
    return caches[path].get();
  }

  // Takes the existing cache or loads the cache at |path|. May return nullptr
  // if the cache does not exist.
  std::unique_ptr<IndexFile> TryTakeOrLoad(const std::string& path) {
    auto it = caches.find(path);
    if (it != caches.end()) {
      auto result = std::move(it->second);
      caches.erase(it);
      return result;
    }

    return LoadCachedIndex(config_, path);
  }

  // Takes the existing cache or loads the cache at |path|. Asserts the cache
  // exists.
  std::unique_ptr<IndexFile> TakeOrLoad(const std::string& path) {
    auto result = TryTakeOrLoad(path);
    assert(result);
    return result;
  }

  std::unordered_map<std::string, std::unique_ptr<IndexFile>> caches;
  Config* config_;
};

// Caches timestamps of cc files so we can avoid filesystem reads. This is
// important for import perf, as during dependency checking the same files are
// checked over and over again if they are common headers.
struct TimestampManager {
  optional<int64_t> GetLastCachedModificationTime(CacheLoader* cache_loader,
                                                  const std::string& path) {
    {
      std::lock_guard<std::mutex> guard(mutex_);
      auto it = timestamps_.find(path);
      if (it != timestamps_.end())
        return it->second;
    }
    IndexFile* file = cache_loader->TryLoad(path);
    if (!file)
      return nullopt;

    UpdateCachedModificationTime(path, file->last_modification_time);
    return file->last_modification_time;
  }

  void UpdateCachedModificationTime(const std::string& path,
                                    int64_t timestamp) {
    std::lock_guard<std::mutex> guard(mutex_);
    timestamps_[path] = timestamp;
  }

  // TODO: use std::shared_mutex so we can have multiple readers.
  std::mutex mutex_;
  std::unordered_map<std::string, int64_t> timestamps_;
};

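// Tracks which files are currently being indexed so the same file is not
// indexed by two threads at once.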
struct IndexManager {
  std::unordered_set<std::string> files_being_indexed_;
  std::mutex mutex_;

  // Marks a file as being indexed. Returns true if the file is not already
  // being indexed.
  bool MarkIndex(const std::string& path) {
    std::lock_guard<std::mutex> lock(mutex_);

    return files_being_indexed_.insert(path).second;
  }

  // Unmarks a file as being indexed, so it can get indexed again in the
  // future.
  void ClearIndex(const std::string& path) {
    std::lock_guard<std::mutex> lock(mutex_);

    auto it = files_being_indexed_.find(path);
    assert(it != files_being_indexed_.end());
    files_being_indexed_.erase(it);
  }
};

// Send indexing progress to client if reporting is enabled.
void EmitProgress(Config* config, QueueManager* queue) {
  if (config->enableProgressReports) {
    Out_Progress out;
    out.params.indexRequestCount = queue->index_request.Size();
    out.params.doIdMapCount = queue->do_id_map.Size();
    out.params.loadPreviousIndexCount = queue->load_previous_index.Size();
    out.params.onIdMappedCount = queue->on_id_mapped.Size();
    out.params.onIndexedCount = queue->on_indexed.Size();

    IpcManager::instance()->SendOutMessageToClient(IpcId::Cout, out);
  }
}

}  // namespace

////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// IMPORT PIPELINE /////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////

enum class FileParseQuery { NeedsParse, DoesNotNeedParse, NoSuchFile };

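// Parses |path| (or loads it from cache when neither it nor any of its
// dependencies changed) and returns the resulting Index_DoIdMap requests.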
std::vector<Index_DoIdMap> DoParseFile(
    Config* config,
    WorkingFiles* working_files,
    ClangIndex* index,
    FileConsumer::SharedState* file_consumer_shared,
    TimestampManager* timestamp_manager,
    ImportManager* import_manager,
    CacheLoader* cache_loader,
    bool is_interactive,
    const std::string& path,
    const std::vector<std::string>& args,
    const optional<FileContents>& contents) {
  std::vector<Index_DoIdMap> result;

  // Always run this block, even if we are interactive, so we can check
  // dependencies and reset files in |file_consumer_shared|.
  IndexFile* previous_index = cache_loader->TryLoad(path);
  if (previous_index) {
    // If none of the dependencies have changed and the index is not
    // interactive (ie, requested by a file save), skip parsing and just load
    // from cache.

    // Checks if |path| needs to be reparsed. This will modify cached state
    // such that calling this function twice with the same path may return true
    // the first time but will return false the second.
    auto file_needs_parse = [&](const std::string& path, bool is_dependency) {
      // If the file is a dependency but another file has already imported it,
      // don't bother.
      if (!is_interactive && is_dependency &&
          !import_manager->TryMarkDependencyImported(path)) {
        return FileParseQuery::DoesNotNeedParse;
      }

      optional<int64_t> modification_timestamp = GetLastModificationTime(path);

      // Cannot find file.
      if (!modification_timestamp)
        return FileParseQuery::NoSuchFile;

      optional<int64_t> last_cached_modification =
          timestamp_manager->GetLastCachedModificationTime(cache_loader, path);

      // File has been changed.
      if (!last_cached_modification ||
          modification_timestamp != *last_cached_modification) {
        file_consumer_shared->Reset(path);
        timestamp_manager->UpdateCachedModificationTime(
            path, *modification_timestamp);
        return FileParseQuery::NeedsParse;
      }

      // File has not changed, do not parse it.
      return FileParseQuery::DoesNotNeedParse;
    };

    // Check timestamps and update |file_consumer_shared|.
    FileParseQuery path_state = file_needs_parse(path, false /*is_dependency*/);

    // Target file does not exist on disk, do not emit any indexes.
    // TODO: Dependencies should be reassigned to other files. We can do this by
    // updating the "primary_file" if it doesn't exist. Might not actually be a
    // problem in practice.
    if (path_state == FileParseQuery::NoSuchFile)
      return result;

    bool needs_reparse =
        is_interactive || path_state == FileParseQuery::NeedsParse;

    for (const std::string& dependency : previous_index->dependencies) {
      assert(!dependency.empty());

      // note: Use != as there are multiple failure results for FileParseQuery.
      if (file_needs_parse(dependency, true /*is_dependency*/) !=
          FileParseQuery::DoesNotNeedParse) {
        LOG_S(INFO) << "Timestamp has changed for " << dependency << " (via "
                    << previous_index->path << ")";
        needs_reparse = true;
        // SUBTLE: Do not break here, as |file_consumer_shared| is updated
        // inside of |file_needs_parse|.
      }
    }

    // No timestamps changed - load directly from cache.
    if (!needs_reparse) {
      LOG_S(INFO) << "Skipping parse; no timestamp change for " << path;

      // TODO/FIXME: real perf
      PerformanceImportFile perf;
      result.push_back(Index_DoIdMap(cache_loader->TakeOrLoad(path), perf,
                                     is_interactive, false /*write_to_disk*/));
      for (const std::string& dependency : previous_index->dependencies) {
        // Only load a dependency if it is not already loaded.
        //
        // This is important for perf in large projects where there are lots of
        // dependencies shared between many files.
        if (!file_consumer_shared->Mark(dependency))
          continue;

        LOG_S(INFO) << "Emitting index result for " << dependency << " (via "
                    << previous_index->path << ")";

        std::unique_ptr<IndexFile> dependency_index =
            cache_loader->TryTakeOrLoad(dependency);

        // |dependency_index| may be null if there is no cache for it but
        // another file has already started importing it.
        if (!dependency_index)
          continue;

        result.push_back(Index_DoIdMap(std::move(dependency_index), perf,
                                       is_interactive,
                                       false /*write_to_disk*/));
      }
      return result;
    }
  }

LOG_S(INFO) << "Parsing " << path;
|
|
|
|
|
|
|
|
// Load file contents for all dependencies into memory. If the dependencies
|
|
|
|
// for the file changed we may not end up using all of the files we
|
|
|
|
// preloaded. If a new dependency was added the indexer will grab the file
|
|
|
|
// contents as soon as possible.
|
|
|
|
//
|
|
|
|
// We do this to minimize the race between indexing a file and capturing the
|
|
|
|
// file contents.
|
|
|
|
//
|
|
|
|
// TODO: We might be able to optimize perf by only copying for files in
|
|
|
|
// working_files. We can pass that same set of files to the indexer as
|
|
|
|
// well. We then default to a fast file-copy if not in working set.
|
2017-08-16 05:39:50 +00:00
|
|
|
bool loaded_primary = false;
|
2017-08-15 05:53:44 +00:00
|
|
|
std::vector<FileContents> file_contents;
|
2017-09-13 03:35:27 +00:00
|
|
|
if (contents) {
|
|
|
|
loaded_primary = loaded_primary || contents->path == path;
|
|
|
|
file_contents.push_back(*contents);
|
|
|
|
}
|
2017-08-15 05:53:44 +00:00
|
|
|
for (const auto& it : cache_loader->caches) {
|
|
|
|
const std::unique_ptr<IndexFile>& index = it.second;
|
|
|
|
assert(index);
|
|
|
|
optional<std::string> index_content = ReadContent(index->path);
|
|
|
|
if (!index_content) {
|
|
|
|
LOG_S(ERROR) << "Failed to preload index content for " << index->path;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
file_contents.push_back(FileContents(index->path, *index_content));
|
2017-08-16 05:39:50 +00:00
|
|
|
|
|
|
|
loaded_primary = loaded_primary || index->path == path;
|
|
|
|
}
|
|
|
|
if (!loaded_primary) {
|
|
|
|
optional<std::string> content = ReadContent(path);
|
|
|
|
if (!content) {
|
|
|
|
LOG_S(ERROR) << "Skipping index (file cannot be found): " << path;
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
file_contents.push_back(FileContents(path, *content));
|
2017-08-15 05:53:44 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
PerformanceImportFile perf;
|
|
|
|
std::vector<std::unique_ptr<IndexFile>> indexes = Parse(
|
2017-09-22 01:14:57 +00:00
|
|
|
config, file_consumer_shared, path, args, file_contents, &perf, index);
|
2017-08-15 05:53:44 +00:00
|
|
|
for (std::unique_ptr<IndexFile>& new_index : indexes) {
|
|
|
|
Timer time;
|
|
|
|
|
2017-10-12 15:40:51 +00:00
|
|
|
// Only emit diagnostics for non-interactive sessions, which makes it easier
|
|
|
|
// to identify indexing problems. For interactive sessions, diagnostics are
|
|
|
|
// handled by code completion.
|
|
|
|
if (!is_interactive)
|
|
|
|
EmitDiagnostics(working_files, new_index->path, new_index->diagnostics_);
|
2017-10-28 22:09:14 +00:00
|
|
|
|
2017-09-22 02:25:33 +00:00
|
|
|
// When main thread does IdMap request it will request the previous index if
|
|
|
|
// needed.
|
2017-08-15 05:53:44 +00:00
|
|
|
LOG_S(INFO) << "Emitting index result for " << new_index->path;
|
2017-09-22 01:14:57 +00:00
|
|
|
result.push_back(Index_DoIdMap(std::move(new_index), perf, is_interactive,
|
|
|
|
true /*write_to_disk*/));
|
2017-08-15 05:53:44 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
2017-09-27 06:03:43 +00:00
|
|
|
// Index a file using an already-parsed translation unit from code completion.
// Since most of the time for indexing a file comes from parsing, we can do
// real-time indexing.
// TODO: add option to disable this.
void IndexWithTuFromCodeCompletion(
    QueueManager* queue,
    FileConsumer::SharedState* file_consumer_shared,
    ClangTranslationUnit* tu,
    const std::vector<CXUnsavedFile>& file_contents,
    const std::string& path,
    const std::vector<std::string>& args) {
  file_consumer_shared->Reset(path);

  PerformanceImportFile perf;
  ClangIndex index;
  std::vector<std::unique_ptr<IndexFile>> indexes = ParseWithTu(
      file_consumer_shared, &perf, tu, &index, path, args, file_contents);

  std::vector<Index_DoIdMap> result;
  for (std::unique_ptr<IndexFile>& new_index : indexes) {
    Timer time;

    // When main thread does IdMap request it will request the previous index if
    // needed.
    LOG_S(INFO) << "Emitting index result for " << new_index->path;
    result.push_back(Index_DoIdMap(std::move(new_index), perf,
                                   true /*is_interactive*/,
                                   true /*write_to_disk*/));
  }

  LOG_IF_S(WARNING, result.size() > 1)
      << "Code completion index update generated more than one index";

  queue->do_id_map.EnqueueAll(std::move(result));
}

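// Resolves the translation unit that owns |entry| (so headers are indexed via
// their importing file) and forwards to DoParseFile.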
std::vector<Index_DoIdMap> ParseFile(
    Config* config,
    WorkingFiles* working_files,
    ClangIndex* index,
    FileConsumer::SharedState* file_consumer_shared,
    TimestampManager* timestamp_manager,
    ImportManager* import_manager,
    bool is_interactive,
    const Project::Entry& entry,
    const optional<std::string>& contents) {
  optional<FileContents> file_contents;
  if (contents)
    file_contents = FileContents(entry.filename, *contents);

  CacheLoader cache_loader(config);

  // Try to determine the original import file by loading the file from cache.
  // This lets the user request an index on a header file, which clang will
  // complain about if indexed by itself.
  IndexFile* entry_cache = cache_loader.TryLoad(entry.filename);
  std::string tu_path = entry_cache ? entry_cache->import_file : entry.filename;
  return DoParseFile(config, working_files, index, file_consumer_shared,
                     timestamp_manager, import_manager, &cache_loader,
                     is_interactive, tu_path, entry.args, file_contents);
}

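// Pulls one request off |index_request| and parses it. Returns true if any
// work was done.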
bool IndexMain_DoParse(Config* config,
                       WorkingFiles* working_files,
                       QueueManager* queue,
                       FileConsumer::SharedState* file_consumer_shared,
                       TimestampManager* timestamp_manager,
                       ImportManager* import_manager,
                       ClangIndex* index) {
  optional<Index_Request> request = queue->index_request.TryDequeue();
  if (!request)
    return false;

  Project::Entry entry;
  entry.filename = request->path;
  entry.args = request->args;
  std::vector<Index_DoIdMap> responses = ParseFile(
      config, working_files, index, file_consumer_shared, timestamp_manager,
      import_manager, request->is_interactive, entry, request->contents);

  // Don't bother sending an IdMap request if there are no responses.
  if (responses.empty())
    return false;

  // EnqueueAll will clear |responses|.
  queue->do_id_map.EnqueueAll(std::move(responses));
  return true;
}

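// Pulls one response off |on_id_mapped|, builds a delta IndexUpdate against
// the previous index, optionally writes the new index to disk, and queues the
// update on |on_indexed|. Returns true if any work was done.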
bool IndexMain_DoCreateIndexUpdate(Config* config,
|
|
|
|
QueueManager* queue,
|
|
|
|
TimestampManager* timestamp_manager) {
|
2017-08-02 03:23:37 +00:00
|
|
|
optional<Index_OnIdMapped> response = queue->on_id_mapped.TryDequeue();
|
2017-07-30 04:24:02 +00:00
|
|
|
if (!response)
|
|
|
|
return false;
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
Timer time;
|
2017-08-02 03:23:37 +00:00
|
|
|
|
2017-07-30 18:49:24 +00:00
|
|
|
IdMap* previous_id_map = nullptr;
|
|
|
|
IndexFile* previous_index = nullptr;
|
|
|
|
if (response->previous) {
|
|
|
|
previous_id_map = response->previous->ids.get();
|
|
|
|
previous_index = response->previous->file.get();
|
|
|
|
}
|
|
|
|
|
2017-08-15 07:46:21 +00:00
|
|
|
// Build delta update.
|
2017-09-22 01:14:57 +00:00
|
|
|
IndexUpdate update =
|
|
|
|
IndexUpdate::CreateDelta(previous_id_map, response->current->ids.get(),
|
|
|
|
previous_index, response->current->file.get());
|
2017-07-30 04:24:02 +00:00
|
|
|
response->perf.index_make_delta = time.ElapsedMicrosecondsAndReset();
|
2017-09-24 00:36:28 +00:00
|
|
|
LOG_S(INFO) << "Built index update for " << response->current->file->path
|
|
|
|
<< " (is_delta=" << !!response->previous << ")";
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-08-17 02:06:28 +00:00
|
|
|
// Write current index to disk if requested.
|
|
|
|
if (response->write_to_disk) {
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_S(INFO) << "Writing cached index to disk for "
|
|
|
|
<< response->current->file->path;
|
2017-08-15 07:46:21 +00:00
|
|
|
time.Reset();
|
|
|
|
WriteToCache(config, *response->current->file);
|
|
|
|
response->perf.index_save_to_disk = time.ElapsedMicrosecondsAndReset();
|
2017-09-22 01:14:57 +00:00
|
|
|
timestamp_manager->UpdateCachedModificationTime(
|
|
|
|
response->current->file->path,
|
|
|
|
response->current->file->last_modification_time);
|
2017-08-15 07:46:21 +00:00
|
|
|
}
|
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
#if false
|
2017-09-22 01:14:57 +00:00
|
|
|
#define PRINT_SECTION(name) \
|
|
|
|
if (response->perf.name) { \
|
|
|
|
total += response->perf.name; \
|
2017-07-30 04:24:02 +00:00
|
|
|
output << " " << #name << ": " << FormatMicroseconds(response->perf.name); \
|
|
|
|
}
|
|
|
|
std::stringstream output;
|
|
|
|
long long total = 0;
|
|
|
|
output << "[perf]";
|
|
|
|
PRINT_SECTION(index_parse);
|
|
|
|
PRINT_SECTION(index_build);
|
|
|
|
PRINT_SECTION(index_save_to_disk);
|
|
|
|
PRINT_SECTION(index_load_cached);
|
|
|
|
PRINT_SECTION(querydb_id_map);
|
|
|
|
PRINT_SECTION(index_make_delta);
|
|
|
|
output << "\n total: " << FormatMicroseconds(total);
|
|
|
|
output << " path: " << response->current_index->path;
|
|
|
|
output << std::endl;
|
|
|
|
std::cerr << output.rdbuf();
|
|
|
|
#undef PRINT_SECTION
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
if (response->is_interactive)
|
|
|
|
std::cerr << "Applying IndexUpdate" << std::endl << update.ToString() << std::endl;
|
|
|
|
#endif
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
Index_OnIndexed reply(update, response->perf);
|
2017-08-02 03:23:37 +00:00
|
|
|
queue->on_indexed.Enqueue(std::move(reply));
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
return true;
|
|
|
|
}
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-08-15 07:22:13 +00:00
|
|
|
bool IndexMain_LoadPreviousIndex(Config* config, QueueManager* queue) {
|
|
|
|
optional<Index_DoIdMap> response = queue->load_previous_index.TryDequeue();
|
|
|
|
if (!response)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
response->previous = LoadCachedIndex(config, response->current->path);
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_IF_S(ERROR, !response->previous)
|
|
|
|
<< "Unable to load previous index for already imported index "
|
|
|
|
<< response->current->path;
|
2017-08-15 07:22:13 +00:00
|
|
|
|
|
|
|
queue->do_id_map.Enqueue(std::move(*response));
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2017-08-02 03:23:37 +00:00
|
|
|
bool IndexMergeIndexUpdates(QueueManager* queue) {
|
|
|
|
optional<Index_OnIndexed> root = queue->on_indexed.TryDequeue();
|
2017-07-30 04:24:02 +00:00
|
|
|
if (!root)
|
|
|
|
return false;
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
bool did_merge = false;
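  // Keep draining |on_indexed| and folding each dequeued update into |root|,
  // so querydb receives one combined update instead of many small ones.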
|
|
|
|
while (true) {
|
2017-08-02 03:23:37 +00:00
|
|
|
optional<Index_OnIndexed> to_join = queue->on_indexed.TryDequeue();
|
2017-07-30 04:24:02 +00:00
|
|
|
if (!to_join) {
|
2017-08-02 03:23:37 +00:00
|
|
|
queue->on_indexed.Enqueue(std::move(*root));
|
2017-07-30 04:24:02 +00:00
|
|
|
return did_merge;
|
|
|
|
}
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
did_merge = true;
|
2017-08-15 07:22:13 +00:00
|
|
|
Timer time;
|
2017-07-30 04:24:02 +00:00
|
|
|
root->update.Merge(to_join->update);
|
2017-09-27 06:03:43 +00:00
|
|
|
// time.ResetAndPrint("Joined querydb updates for files: " +
|
|
|
|
// StringJoinMap(root->update.files_def_update,
|
|
|
|
//[](const QueryFile::DefUpdate& update) {
|
|
|
|
// return update.path;
|
|
|
|
//}));
|
2017-07-30 04:24:02 +00:00
|
|
|
}
|
|
|
|
}
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkThread::Result IndexMain(Config* config,
|
|
|
|
FileConsumer::SharedState* file_consumer_shared,
|
|
|
|
TimestampManager* timestamp_manager,
|
|
|
|
ImportManager* import_manager,
|
|
|
|
Project* project,
|
|
|
|
WorkingFiles* working_files,
|
|
|
|
MultiQueueWaiter* waiter,
|
|
|
|
QueueManager* queue) {
|
2017-11-26 22:20:43 +00:00
|
|
|
EmitProgress(config, queue);
|
2017-10-25 07:12:11 +00:00
|
|
|
|
2017-07-30 04:24:02 +00:00
|
|
|
// TODO: dispose of index after it is not used for a while.
|
2017-11-11 19:41:09 +00:00
|
|
|
ClangIndex index;
|
2017-04-23 20:02:41 +00:00
|
|
|
|
2017-09-13 03:35:27 +00:00
|
|
|
  // TODO: process all of IndexMain_DoIndex before calling
|
|
|
|
// IndexMain_DoCreateIndexUpdate for
|
|
|
|
// better icache behavior. We need to have some threads spinning on
|
|
|
|
// both though
|
|
|
|
// otherwise memory usage will get bad.
|
|
|
|
|
|
|
|
// We need to make sure to run both IndexMain_DoParse and
|
|
|
|
// IndexMain_DoCreateIndexUpdate so we don't starve querydb from doing any
|
|
|
|
// work. Running both also lets the user query the partially constructed
|
|
|
|
// index.
|
2017-09-22 01:14:57 +00:00
|
|
|
bool did_parse =
|
|
|
|
IndexMain_DoParse(config, working_files, queue, file_consumer_shared,
|
|
|
|
timestamp_manager, import_manager, &index);
|
2017-09-13 03:35:27 +00:00
|
|
|
|
|
|
|
bool did_create_update =
|
|
|
|
IndexMain_DoCreateIndexUpdate(config, queue, timestamp_manager);
|
|
|
|
|
|
|
|
bool did_load_previous = IndexMain_LoadPreviousIndex(config, queue);
|
|
|
|
|
|
|
|
// Nothing to index and no index updates to create, so join some already
|
|
|
|
// created index updates to reduce work on querydb thread.
|
|
|
|
bool did_merge = false;
|
|
|
|
if (!did_parse && !did_create_update && !did_load_previous)
|
|
|
|
did_merge = IndexMergeIndexUpdates(queue);
|
|
|
|
|
|
|
|
// We didn't do any work, so wait for a notification.
|
|
|
|
if (!did_parse && !did_create_update && !did_merge && !did_load_previous) {
|
2017-09-22 01:14:57 +00:00
|
|
|
waiter->Wait({&queue->index_request, &queue->on_id_mapped,
|
|
|
|
&queue->load_previous_index, &queue->on_indexed});
|
2017-07-30 04:24:02 +00:00
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
return queue->HasWork() ? WorkThread::Result::MoreWork
|
|
|
|
: WorkThread::Result::NoWork;
|
2017-07-30 04:24:02 +00:00
|
|
|
}
|
2017-04-16 19:02:29 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
bool QueryDb_ImportMain(Config* config,
|
|
|
|
QueryDatabase* db,
|
|
|
|
ImportManager* import_manager,
|
|
|
|
QueueManager* queue,
|
|
|
|
WorkingFiles* working_files) {
|
2017-11-26 22:20:43 +00:00
|
|
|
EmitProgress(config, queue);
|
2017-10-25 07:12:11 +00:00
|
|
|
|
2017-08-16 03:29:49 +00:00
|
|
|
bool did_work = false;
|
|
|
|
|
|
|
|
while (true) {
|
|
|
|
optional<Index_DoIdMap> request = queue->do_id_map.TryDequeue();
|
|
|
|
if (!request)
|
|
|
|
break;
|
|
|
|
did_work = true;
|
|
|
|
|
2017-10-12 15:40:51 +00:00
|
|
|
assert(request->current);
|
|
|
|
|
2017-08-16 03:29:49 +00:00
|
|
|
// If the request does not have previous state and we have already imported
|
|
|
|
// it, load the previous state from disk and rerun IdMap logic later. Do not
|
|
|
|
// do this if we have already attempted in the past.
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!request->load_previous && !request->previous &&
|
|
|
|
db->usr_to_file.find(LowerPathIfCaseInsensitive(
|
|
|
|
request->current->path)) != db->usr_to_file.end()) {
|
2017-08-16 03:29:49 +00:00
|
|
|
assert(!request->load_previous);
|
|
|
|
request->load_previous = true;
|
|
|
|
queue->load_previous_index.Enqueue(std::move(*request));
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2017-09-24 00:36:28 +00:00
|
|
|
// Check if the file is already being imported into querydb. If it is, drop
|
|
|
|
// the request.
|
|
|
|
//
|
|
|
|
// Note, we must do this *after* we have checked for the previous index,
|
|
|
|
// otherwise we will never actually generate the IdMap.
|
|
|
|
if (!import_manager->StartQueryDbImport(request->current->path)) {
|
|
|
|
LOG_S(INFO) << "Dropping index as it is already being imported for "
|
|
|
|
<< request->current->path;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
Index_OnIdMapped response(request->perf, request->is_interactive,
|
|
|
|
request->write_to_disk);
|
2017-08-16 03:29:49 +00:00
|
|
|
Timer time;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
auto make_map = [db](std::unique_ptr<IndexFile> file)
|
|
|
|
-> std::unique_ptr<Index_OnIdMapped::File> {
|
2017-08-16 03:29:49 +00:00
|
|
|
if (!file)
|
|
|
|
return nullptr;
|
|
|
|
|
|
|
|
auto id_map = MakeUnique<IdMap>(db, file->id_cache);
|
2017-09-22 01:14:57 +00:00
|
|
|
return MakeUnique<Index_OnIdMapped::File>(std::move(file),
|
|
|
|
std::move(id_map));
|
2017-08-16 03:29:49 +00:00
|
|
|
};
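    // Translate the per-file ids in both the freshly parsed index and the
    // previously cached one into querydb ids; the IdMaps produced here are
    // consumed by the delta builder on the indexer thread.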
|
|
|
|
response.current = make_map(std::move(request->current));
|
|
|
|
response.previous = make_map(std::move(request->previous));
|
|
|
|
response.perf.querydb_id_map = time.ElapsedMicrosecondsAndReset();
|
|
|
|
|
|
|
|
queue->on_id_mapped.Enqueue(std::move(response));
|
|
|
|
}
|
|
|
|
|
|
|
|
while (true) {
|
|
|
|
optional<Index_OnIndexed> response = queue->on_indexed.TryDequeue();
|
|
|
|
if (!response)
|
|
|
|
break;
|
|
|
|
|
|
|
|
did_work = true;
|
|
|
|
|
|
|
|
Timer time;
|
|
|
|
|
|
|
|
for (auto& updated_file : response->update.files_def_update) {
|
|
|
|
// TODO: We're reading a file on querydb thread. This is slow!! If this
|
|
|
|
      // is a real problem in practice, we can load the file in a previous stage.
|
|
|
|
// It should be fine though because we only do it if the user has the
|
|
|
|
// file open.
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
|
|
|
working_files->GetFileByFilename(updated_file.path);
|
2017-08-16 03:29:49 +00:00
|
|
|
if (working_file) {
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<std::string> cached_file_contents =
|
|
|
|
LoadCachedFileContents(config, updated_file.path);
|
2017-08-16 03:29:49 +00:00
|
|
|
if (cached_file_contents)
|
|
|
|
working_file->SetIndexContent(*cached_file_contents);
|
|
|
|
else
|
|
|
|
working_file->SetIndexContent(working_file->buffer_content);
|
2017-09-22 01:14:57 +00:00
|
|
|
time.ResetAndPrint(
|
|
|
|
"Update WorkingFile index contents (via disk load) for " +
|
|
|
|
updated_file.path);
|
2017-10-28 22:09:14 +00:00
|
|
|
|
|
|
|
        // Update inactive regions.
|
|
|
|
EmitInactiveLines(working_file, updated_file.inactive_regions);
|
2017-08-16 03:29:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-16 03:54:59 +00:00
|
|
|
time.Reset();
|
2017-08-16 03:29:49 +00:00
|
|
|
db->ApplyIndexUpdate(&response->update);
|
2017-09-22 01:14:57 +00:00
|
|
|
time.ResetAndPrint("Applying index update for " +
|
|
|
|
StringJoinMap(response->update.files_def_update,
|
|
|
|
[](const QueryFile::DefUpdate& value) {
|
|
|
|
return value.path;
|
|
|
|
}));
|
2017-09-24 00:36:28 +00:00
|
|
|
|
2017-11-09 07:06:32 +00:00
|
|
|
// Update semantic highlighting.
|
|
|
|
for (auto& updated_file : response->update.files_def_update) {
|
|
|
|
WorkingFile* working_file =
|
|
|
|
working_files->GetFileByFilename(updated_file.path);
|
|
|
|
if (working_file) {
|
|
|
|
QueryFileId file_id =
|
|
|
|
db->usr_to_file[LowerPathIfCaseInsensitive(working_file->filename)];
|
|
|
|
QueryFile* file = &db->files[file_id.id];
|
|
|
|
EmitSemanticHighlighting(db, working_file, file);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-09-24 00:36:28 +00:00
|
|
|
// Mark the files as being done in querydb stage after we apply the index
|
|
|
|
// update.
|
|
|
|
for (auto& updated_file : response->update.files_def_update)
|
|
|
|
import_manager->DoneQueryDbImport(updated_file.path);
|
2017-08-16 03:29:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return did_work;
|
|
|
|
}
|
2017-08-15 02:07:46 +00:00
|
|
|
|
2017-09-22 01:32:55 +00:00
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
// QUERYDB MAIN ////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
2017-09-22 01:14:57 +00:00
|
|
|
|
|
|
|
bool QueryDbMainLoop(Config* config,
|
|
|
|
QueryDatabase* db,
|
|
|
|
bool* exit_when_idle,
|
|
|
|
MultiQueueWaiter* waiter,
|
|
|
|
QueueManager* queue,
|
|
|
|
Project* project,
|
|
|
|
FileConsumer::SharedState* file_consumer_shared,
|
|
|
|
ImportManager* import_manager,
|
|
|
|
TimestampManager* timestamp_manager,
|
|
|
|
WorkingFiles* working_files,
|
|
|
|
ClangCompleteManager* clang_complete,
|
|
|
|
IncludeComplete* include_complete,
|
|
|
|
CodeCompleteCache* global_code_complete_cache,
|
|
|
|
CodeCompleteCache* non_global_code_complete_cache,
|
|
|
|
CodeCompleteCache* signature_cache) {
|
2017-04-16 21:49:48 +00:00
|
|
|
IpcManager* ipc = IpcManager::instance();
|
2017-03-26 21:40:34 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
bool did_work = false;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<std::unique_ptr<BaseIpcMessage>> messages =
|
|
|
|
ipc->GetMessages(IpcManager::Destination::Server);
|
2017-03-05 19:48:05 +00:00
|
|
|
for (auto& message : messages) {
|
2017-04-23 22:45:40 +00:00
|
|
|
did_work = true;
|
2017-03-05 19:48:05 +00:00
|
|
|
|
2017-03-25 19:18:25 +00:00
|
|
|
switch (message->method_id) {
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::Initialize: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto request = message->As<Ipc_InitializeRequest>();
|
2017-04-26 01:32:45 +00:00
|
|
|
|
|
|
|
// Log initialization parameters.
|
|
|
|
rapidjson::StringBuffer output;
|
|
|
|
Writer writer(output);
|
|
|
|
Reflect(writer, request->params.initializationOptions);
|
|
|
|
std::cerr << output.GetString() << std::endl;
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
if (request->params.rootUri) {
|
|
|
|
std::string project_path = request->params.rootUri->GetPath();
|
2017-07-28 02:14:33 +00:00
|
|
|
LOG_S(INFO) << "[querydb] Initialize in directory " << project_path
|
2017-09-22 01:14:57 +00:00
|
|
|
<< " with uri " << request->params.rootUri->raw_uri;
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
if (!request->params.initializationOptions) {
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_S(FATAL) << "Initialization parameters (particularily "
|
|
|
|
"cacheDirectory) are required";
|
2017-04-23 20:19:09 +00:00
|
|
|
exit(1);
|
|
|
|
}
|
2017-03-05 19:48:05 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
*config = *request->params.initializationOptions;
|
2017-03-15 07:14:44 +00:00
|
|
|
|
2017-05-20 19:31:07 +00:00
|
|
|
// Check client version.
|
2017-09-13 03:35:27 +00:00
|
|
|
if (config->clientVersion != kExpectedClientVersion &&
|
|
|
|
config->clientVersion != -1 /*disable check*/) {
|
2017-05-20 19:31:07 +00:00
|
|
|
Out_ShowLogMessage out;
|
|
|
|
out.display_type = Out_ShowLogMessage::DisplayType::Show;
|
|
|
|
out.params.type = lsMessageType::Error;
|
2017-09-22 01:14:57 +00:00
|
|
|
out.params.message =
|
|
|
|
"cquery client (v" + std::to_string(config->clientVersion) +
|
|
|
|
") and server (v" + std::to_string(kExpectedClientVersion) +
|
|
|
|
") version mismatch. Please update ";
|
2017-05-20 19:31:07 +00:00
|
|
|
if (config->clientVersion > kExpectedClientVersion)
|
|
|
|
out.params.message += "the cquery binary.";
|
|
|
|
else
|
2017-09-22 01:14:57 +00:00
|
|
|
out.params.message +=
|
|
|
|
"your extension client (VSIX file). Make sure to uninstall "
|
|
|
|
"the cquery extension and restart vscode before "
|
|
|
|
"reinstalling.";
|
2017-05-20 19:31:07 +00:00
|
|
|
out.Write(std::cout);
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
// Make sure cache directory is valid.
|
|
|
|
if (config->cacheDirectory.empty()) {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(FATAL) << "Exiting; no cache directory";
|
2017-04-23 20:19:09 +00:00
|
|
|
exit(1);
|
|
|
|
}
|
2017-11-17 22:45:49 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
config->cacheDirectory = NormalizePath(config->cacheDirectory);
|
2017-05-21 19:51:15 +00:00
|
|
|
EnsureEndsInSlash(config->cacheDirectory);
|
2017-04-23 20:19:09 +00:00
|
|
|
MakeDirectoryRecursive(config->cacheDirectory);
|
|
|
|
|
2017-10-25 01:02:15 +00:00
|
|
|
// Ensure there is a resource directory.
|
|
|
|
if (config->resourceDirectory.empty()) {
|
|
|
|
config->resourceDirectory = GetWorkingDirectory();
|
|
|
|
#if defined(_WIN32)
|
|
|
|
config->resourceDirectory +=
|
|
|
|
std::string("../../clang_resource_dir/");
|
|
|
|
#else
|
|
|
|
config->resourceDirectory += std::string("../clang_resource_dir/");
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
config->resourceDirectory = NormalizePath(config->resourceDirectory);
|
|
|
|
LOG_S(INFO) << "Using -resource-dir=" << config->resourceDirectory;
|
|
|
|
|
2017-11-17 21:31:48 +00:00
|
|
|
// Send initialization before starting indexers, so we don't send a
|
|
|
|
// status update too early.
|
|
|
|
// TODO: query request->params.capabilities.textDocument and support
|
|
|
|
// only things the client supports.
|
|
|
|
|
|
|
|
auto response = Out_InitializeResponse();
|
|
|
|
response.id = request->id;
|
|
|
|
|
|
|
|
// response.result.capabilities.textDocumentSync =
|
|
|
|
// lsTextDocumentSyncOptions();
|
|
|
|
// response.result.capabilities.textDocumentSync->openClose = true;
|
|
|
|
// response.result.capabilities.textDocumentSync->change =
|
|
|
|
// lsTextDocumentSyncKind::Full;
|
|
|
|
// response.result.capabilities.textDocumentSync->willSave = true;
|
|
|
|
// response.result.capabilities.textDocumentSync->willSaveWaitUntil =
|
|
|
|
// true;
|
|
|
|
response.result.capabilities.textDocumentSync =
|
|
|
|
lsTextDocumentSyncKind::Incremental;
|
|
|
|
|
|
|
|
response.result.capabilities.renameProvider = true;
|
|
|
|
|
|
|
|
response.result.capabilities.completionProvider =
|
|
|
|
lsCompletionOptions();
|
|
|
|
response.result.capabilities.completionProvider->resolveProvider =
|
|
|
|
false;
|
|
|
|
        // vscode doesn't support trigger character sequences, so we use ':' for
|
|
|
|
|
|
|
|
// '::' and '>' for '->'. See
|
|
|
|
// https://github.com/Microsoft/language-server-protocol/issues/138.
|
|
|
|
response.result.capabilities.completionProvider->triggerCharacters = {
|
|
|
|
".", ":", ">", "#"};
|
|
|
|
|
|
|
|
response.result.capabilities.signatureHelpProvider =
|
|
|
|
lsSignatureHelpOptions();
|
|
|
|
// NOTE: If updating signature help tokens make sure to also update
|
|
|
|
// WorkingFile::FindClosestCallNameInBuffer.
|
|
|
|
response.result.capabilities.signatureHelpProvider
|
|
|
|
->triggerCharacters = {"(", ","};
|
|
|
|
|
|
|
|
response.result.capabilities.codeLensProvider = lsCodeLensOptions();
|
|
|
|
response.result.capabilities.codeLensProvider->resolveProvider =
|
|
|
|
false;
|
|
|
|
|
|
|
|
response.result.capabilities.definitionProvider = true;
|
|
|
|
response.result.capabilities.documentHighlightProvider = true;
|
|
|
|
response.result.capabilities.hoverProvider = true;
|
|
|
|
response.result.capabilities.referencesProvider = true;
|
|
|
|
|
|
|
|
response.result.capabilities.codeActionProvider = true;
|
|
|
|
|
|
|
|
response.result.capabilities.documentSymbolProvider = true;
|
|
|
|
response.result.capabilities.workspaceSymbolProvider = true;
|
|
|
|
|
|
|
|
response.result.capabilities.documentLinkProvider =
|
|
|
|
lsDocumentLinkOptions();
|
|
|
|
response.result.capabilities.documentLinkProvider->resolveProvider =
|
|
|
|
false;
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::Initialize, response);
|
|
|
|
|
2017-05-21 07:37:53 +00:00
|
|
|
// Set project root.
|
2017-09-22 01:14:57 +00:00
|
|
|
config->projectRoot =
|
|
|
|
NormalizePath(request->params.rootUri->GetPath());
|
2017-05-21 19:51:15 +00:00
|
|
|
EnsureEndsInSlash(config->projectRoot);
|
2017-05-21 07:37:53 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
// Start indexer threads.
|
2017-07-30 04:24:02 +00:00
|
|
|
if (config->indexerCount == 0) {
|
2017-10-25 02:18:47 +00:00
|
|
|
// If the user has not specified how many indexers to run, try to
|
|
|
|
// guess an appropriate value. Default to 80% utilization.
|
|
|
|
const float kDefaultTargetUtilization = 0.8;
|
2017-09-22 01:14:57 +00:00
|
|
|
config->indexerCount =
|
2017-10-25 02:18:47 +00:00
|
|
|
std::thread::hardware_concurrency() * kDefaultTargetUtilization;
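          // Note: std::thread::hardware_concurrency() may return 0 when the
          // value cannot be determined, hence the fallback below that forces
          // at least one indexer.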
|
|
|
|
if (config->indexerCount <= 0)
|
|
|
|
config->indexerCount = 1;
|
2017-07-30 04:24:02 +00:00
|
|
|
}
|
2017-10-25 02:18:47 +00:00
|
|
|
LOG_S(INFO) << "Starting " << config->indexerCount << " indexers";
|
2017-07-30 04:24:02 +00:00
|
|
|
for (int i = 0; i < config->indexerCount; ++i) {
|
2017-09-24 00:36:28 +00:00
|
|
|
WorkThread::StartThread("indexer" + std::to_string(i), [=]() {
|
2017-09-22 01:14:57 +00:00
|
|
|
return IndexMain(config, file_consumer_shared, timestamp_manager,
|
|
|
|
import_manager, project, working_files, waiter,
|
|
|
|
queue);
|
2017-04-23 20:19:09 +00:00
|
|
|
});
|
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-05-21 19:51:15 +00:00
|
|
|
Timer time;
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
// Open up / load the project.
|
2017-11-21 16:47:28 +00:00
|
|
|
project->Load(config->extraClangArguments,
|
|
|
|
config->compilationDatabaseDirectory,
|
|
|
|
project_path, config->resourceDirectory);
|
2017-09-22 01:14:57 +00:00
|
|
|
time.ResetAndPrint("[perf] Loaded compilation entries (" +
|
|
|
|
std::to_string(project->entries.size()) +
|
|
|
|
" files)");
|
2017-03-26 21:40:34 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// Start scanning include directories before dispatching project
|
|
|
|
// files, because that takes a long time.
|
2017-05-27 04:21:00 +00:00
|
|
|
include_complete->Rescan();
|
2017-04-16 23:52:42 +00:00
|
|
|
|
2017-05-21 19:51:15 +00:00
|
|
|
time.Reset();
|
2017-09-22 01:14:57 +00:00
|
|
|
project->ForAllFilteredFiles(
|
|
|
|
config, [&](int i, const Project::Entry& entry) {
|
|
|
|
// std::cerr << "[" << i << "/" << (project->entries.size() - 1)
|
|
|
|
// << "] Dispatching index request for file " << entry.filename
|
|
|
|
// << std::endl;
|
|
|
|
bool is_interactive =
|
|
|
|
working_files->GetFileByFilename(entry.filename) != nullptr;
|
|
|
|
queue->index_request.Enqueue(Index_Request(
|
|
|
|
entry.filename, entry.args, is_interactive, nullopt));
|
|
|
|
});
|
2017-07-30 04:24:02 +00:00
|
|
|
|
|
|
|
// We need to support multiple concurrent index processes.
|
2017-05-21 19:51:15 +00:00
|
|
|
time.ResetAndPrint("[perf] Dispatched initial index requests");
|
2017-04-23 20:19:09 +00:00
|
|
|
}
|
2017-04-16 23:52:42 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-14 08:21:03 +00:00
|
|
|
|
2017-05-03 06:45:10 +00:00
|
|
|
case IpcId::Exit: {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(INFO) << "Exiting; got IpcId::Exit";
|
2017-05-03 06:45:10 +00:00
|
|
|
exit(0);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::CqueryFreshenIndex: {
|
2017-07-28 02:14:33 +00:00
|
|
|
LOG_S(INFO) << "Freshening " << project->entries.size() << " files";
|
2017-09-24 00:36:28 +00:00
|
|
|
|
|
|
|
// TODO: think about this flow and test it more.
|
|
|
|
|
|
|
|
// Unmark all files whose timestamp has changed.
|
|
|
|
CacheLoader cache_loader(config);
|
|
|
|
for (const auto& file : db->files) {
|
2017-10-17 05:44:58 +00:00
|
|
|
if (!file.def)
|
2017-09-24 00:36:28 +00:00
|
|
|
continue;
|
|
|
|
|
|
|
|
optional<int64_t> modification_timestamp =
|
2017-10-17 05:44:58 +00:00
|
|
|
GetLastModificationTime(file.def->path);
|
2017-09-24 00:36:28 +00:00
|
|
|
if (!modification_timestamp)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
optional<int64_t> cached_modification =
|
|
|
|
timestamp_manager->GetLastCachedModificationTime(&cache_loader,
|
2017-10-17 05:44:58 +00:00
|
|
|
file.def->path);
|
2017-09-24 00:36:28 +00:00
|
|
|
if (modification_timestamp != cached_modification)
|
2017-10-17 05:44:58 +00:00
|
|
|
file_consumer_shared->Reset(file.def->path);
|
2017-09-24 00:36:28 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Send index requests for every file.
|
2017-09-22 01:14:57 +00:00
|
|
|
project->ForAllFilteredFiles(
|
|
|
|
config, [&](int i, const Project::Entry& entry) {
|
|
|
|
LOG_S(INFO) << "[" << i << "/" << (project->entries.size() - 1)
|
|
|
|
<< "] Dispatching index request for file "
|
|
|
|
<< entry.filename;
|
|
|
|
bool is_interactive =
|
|
|
|
working_files->GetFileByFilename(entry.filename) != nullptr;
|
|
|
|
queue->index_request.Enqueue(Index_Request(
|
|
|
|
entry.filename, entry.args, is_interactive, nullopt));
|
|
|
|
});
|
2017-04-14 08:21:03 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-19 07:52:48 +00:00
|
|
|
|
2017-05-24 07:17:29 +00:00
|
|
|
case IpcId::CqueryTypeHierarchyTree: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryTypeHierarchyTree>();
|
2017-05-24 07:17:29 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-24 07:17:29 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-24 07:17:29 +00:00
|
|
|
|
|
|
|
Out_CqueryTypeHierarchyTree response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-24 07:17:29 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Type) {
|
2017-09-22 01:14:57 +00:00
|
|
|
response.result = BuildInheritanceHierarchyForType(
|
|
|
|
db, working_files, QueryTypeId(ref.idx.idx));
|
2017-07-21 02:53:08 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
if (ref.idx.kind == SymbolKind::Func) {
|
2017-09-22 01:14:57 +00:00
|
|
|
response.result = BuildInheritanceHierarchyForFunc(
|
|
|
|
db, working_files, QueryFuncId(ref.idx.idx));
|
2017-05-24 07:17:29 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryTypeHierarchyTree, response);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryCallTreeInitial: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryCallTreeInitial>();
|
2017-05-24 07:17:29 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-24 07:17:29 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-24 07:17:29 +00:00
|
|
|
|
|
|
|
Out_CqueryCallTree response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-24 07:17:29 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Func) {
|
2017-09-22 01:14:57 +00:00
|
|
|
response.result = BuildInitialCallTree(db, working_files,
|
|
|
|
QueryFuncId(ref.idx.idx));
|
2017-05-24 07:17:29 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryCallTreeInitial, response);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryCallTreeExpand: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryCallTreeExpand>();
|
2017-05-24 07:17:29 +00:00
|
|
|
|
|
|
|
Out_CqueryCallTree response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
|
|
|
auto func_id = db->usr_to_func.find(msg->params.usr);
|
|
|
|
if (func_id != db->usr_to_func.end())
|
2017-09-22 01:14:57 +00:00
|
|
|
response.result =
|
|
|
|
BuildExpandCallTree(db, working_files, func_id->second);
|
2017-05-24 07:17:29 +00:00
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryCallTreeExpand, response);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-05-07 06:56:04 +00:00
|
|
|
case IpcId::CqueryVars: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryVars>();
|
2017-05-07 06:56:04 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-07 06:56:04 +00:00
|
|
|
|
|
|
|
Out_LocationList response;
|
|
|
|
response.id = msg->id;
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-07 06:56:04 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Type) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryType& type = db->types[ref.idx.idx];
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryLocation> locations =
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, type.instances);
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result = GetLsLocations(db, working_files, locations);
|
|
|
|
}
|
|
|
|
}
|
2017-05-24 07:17:29 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryVars, response);
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryCallers: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryCallers>();
|
2017-05-07 06:56:04 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-07 06:56:04 +00:00
|
|
|
|
|
|
|
Out_LocationList response;
|
|
|
|
response.id = msg->id;
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-07 06:56:04 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Func) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFunc& func = db->funcs[ref.idx.idx];
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryLocation> locations =
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, func.callers);
|
2017-09-22 01:14:57 +00:00
|
|
|
for (QueryFuncRef func_ref :
|
2017-10-17 05:44:58 +00:00
|
|
|
GetCallersForAllBaseFunctions(db, func))
|
2017-07-19 07:11:16 +00:00
|
|
|
locations.push_back(func_ref.loc);
|
2017-09-22 01:14:57 +00:00
|
|
|
for (QueryFuncRef func_ref :
|
2017-10-17 05:44:58 +00:00
|
|
|
GetCallersForAllDerivedFunctions(db, func))
|
2017-07-19 07:11:16 +00:00
|
|
|
locations.push_back(func_ref.loc);
|
|
|
|
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result = GetLsLocations(db, working_files, locations);
|
|
|
|
}
|
|
|
|
}
|
2017-05-24 07:17:29 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryCallers, response);
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryBase: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryBase>();
|
2017-05-07 06:56:04 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-07 06:56:04 +00:00
|
|
|
|
|
|
|
Out_LocationList response;
|
|
|
|
response.id = msg->id;
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-07 06:56:04 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Type) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryType& type = db->types[ref.idx.idx];
|
|
|
|
if (!type.def)
|
2017-09-22 01:14:57 +00:00
|
|
|
continue;
|
|
|
|
std::vector<QueryLocation> locations =
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, type.def->parents);
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result = GetLsLocations(db, working_files, locations);
|
2017-09-22 01:14:57 +00:00
|
|
|
} else if (ref.idx.kind == SymbolKind::Func) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFunc& func = db->funcs[ref.idx.idx];
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<QueryLocation> location =
|
2017-10-17 05:44:58 +00:00
|
|
|
GetBaseDefinitionOrDeclarationSpelling(db, func);
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!location)
|
|
|
|
continue;
|
|
|
|
optional<lsLocation> ls_loc =
|
|
|
|
GetLsLocation(db, working_files, *location);
|
|
|
|
if (!ls_loc)
|
|
|
|
continue;
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result.push_back(*ls_loc);
|
|
|
|
}
|
|
|
|
}
|
2017-05-24 07:17:29 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryBase, response);
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryDerived: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryDerived>();
|
2017-05-07 06:56:04 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-05-07 06:56:04 +00:00
|
|
|
|
|
|
|
Out_LocationList response;
|
|
|
|
response.id = msg->id;
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-05-07 06:56:04 +00:00
|
|
|
if (ref.idx.kind == SymbolKind::Type) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryType& type = db->types[ref.idx.idx];
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryLocation> locations =
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, type.derived);
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result = GetLsLocations(db, working_files, locations);
|
2017-09-22 01:14:57 +00:00
|
|
|
} else if (ref.idx.kind == SymbolKind::Func) {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFunc& func = db->funcs[ref.idx.idx];
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryLocation> locations =
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, func.derived);
|
2017-05-07 06:56:04 +00:00
|
|
|
response.result = GetLsLocations(db, working_files, locations);
|
|
|
|
}
|
|
|
|
}
|
2017-05-24 07:17:29 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::CqueryDerived, response);
|
2017-05-07 06:56:04 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDidOpen: {
|
|
|
|
// NOTE: This function blocks code lens. If it starts taking a long time
|
|
|
|
// we will need to find a way to unblock the code lens request.
|
|
|
|
|
|
|
|
Timer time;
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDidOpen>();
|
2017-05-26 06:40:38 +00:00
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
2017-04-23 20:19:09 +00:00
|
|
|
WorkingFile* working_file = working_files->OnOpen(msg->params);
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<std::string> cached_file_contents =
|
|
|
|
LoadCachedFileContents(config, path);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (cached_file_contents)
|
|
|
|
working_file->SetIndexContent(*cached_file_contents);
|
|
|
|
else
|
|
|
|
working_file->SetIndexContent(working_file->buffer_content);
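        // Without a cached index the current buffer is the best stand-in for
        // the indexed content, so the index-to-buffer line mapping starts out
        // trivial until a fresh index arrives.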
|
2017-05-20 21:45:46 +00:00
|
|
|
|
2017-10-28 22:09:14 +00:00
|
|
|
QueryFile* file = nullptr;
|
|
|
|
FindFileOrFail(db, nullopt, path, &file);
|
2017-11-09 07:06:32 +00:00
|
|
|
if (file && file->def) {
|
2017-10-28 22:09:14 +00:00
|
|
|
EmitInactiveLines(working_file, file->def->inactive_regions);
|
2017-11-09 07:06:32 +00:00
|
|
|
EmitSemanticHighlighting(db, working_file, file);
|
|
|
|
}
|
2017-05-20 21:45:46 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
time.ResetAndPrint(
|
|
|
|
"[querydb] Loading cached index file for DidOpen (blocks "
|
|
|
|
"CodeLens)");
|
2017-04-14 08:21:03 +00:00
|
|
|
|
2017-05-27 04:21:00 +00:00
|
|
|
include_complete->AddFile(working_file->filename);
|
|
|
|
clang_complete->NotifyView(path);
|
2017-08-17 03:39:00 +00:00
|
|
|
|
|
|
|
// Submit new index request.
|
2017-09-22 01:14:57 +00:00
|
|
|
const Project::Entry& entry =
|
|
|
|
project->FindCompilationEntryForFile(path);
|
|
|
|
queue->index_request.PriorityEnqueue(Index_Request(
|
|
|
|
entry.filename, entry.args, true /*is_interactive*/, nullopt));
|
2017-05-21 19:51:15 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-23 21:24:06 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDidChange: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDidChange>();
|
2017-05-26 06:40:38 +00:00
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
2017-04-23 20:19:09 +00:00
|
|
|
working_files->OnChange(msg->params);
|
2017-05-27 04:21:00 +00:00
|
|
|
clang_complete->NotifyEdit(path);
|
2017-09-22 02:25:33 +00:00
|
|
|
clang_complete->DiagnosticsUpdate(
|
|
|
|
msg->params.textDocument.AsTextDocumentIdentifier());
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-23 21:24:06 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDidClose: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDidClose>();
|
2017-10-17 18:43:33 +00:00
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
2017-05-29 23:33:57 +00:00
|
|
|
|
|
|
|
// Clear any diagnostics for the file.
|
|
|
|
Out_TextDocumentPublishDiagnostics diag;
|
|
|
|
diag.params.uri = msg->params.textDocument.uri;
|
2017-09-22 01:14:57 +00:00
|
|
|
IpcManager::instance()->SendOutMessageToClient(
|
|
|
|
IpcId::TextDocumentPublishDiagnostics, diag);
|
2017-05-29 23:33:57 +00:00
|
|
|
|
|
|
|
// Remove internal state.
|
2017-04-23 20:19:09 +00:00
|
|
|
working_files->OnClose(msg->params);
|
2017-10-17 18:43:33 +00:00
|
|
|
clang_complete->NotifyClose(path);
|
2017-05-29 23:33:57 +00:00
|
|
|
|
2017-04-15 05:14:05 +00:00
|
|
|
break;
|
2017-04-14 08:21:03 +00:00
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDidSave: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDidSave>();
|
2017-04-17 01:22:59 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
2017-05-09 05:09:57 +00:00
|
|
|
// Send out an index request, and copy the current buffer state so we
|
|
|
|
// can update the cached index contents when the index is done.
|
|
|
|
//
|
|
|
|
// We also do not index if there is already an index request.
|
|
|
|
//
|
|
|
|
// TODO: Cancel outgoing index request. Might be tricky to make
|
|
|
|
// efficient since we have to cancel.
|
|
|
|
// - we could have an |atomic<int> active_cancellations| variable
|
|
|
|
// that all of the indexers check before accepting an index. if
|
|
|
|
// zero we don't slow down fast-path. if non-zero we acquire
|
|
|
|
// mutex and check to see if we should skip the current request.
|
|
|
|
// if so, ignore that index response.
|
2017-07-30 04:24:02 +00:00
|
|
|
// TODO: send as priority request
|
|
|
|
Project::Entry entry = project->FindCompilationEntryForFile(path);
|
2017-09-22 01:14:57 +00:00
|
|
|
queue->index_request.Enqueue(Index_Request(
|
|
|
|
entry.filename, entry.args, true /*is_interactive*/, nullopt));
|
2017-05-26 06:40:38 +00:00
|
|
|
|
2017-05-27 04:21:00 +00:00
|
|
|
clang_complete->NotifySave(path);
|
2017-04-17 01:22:59 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-17 01:22:59 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentRename: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentRename>();
|
2017-04-17 01:22:59 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
QueryFileId file_id;
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file, &file_id))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-03-26 06:47:59 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_TextDocumentRename response;
|
|
|
|
response.id = msg->id;
|
2017-03-26 06:47:59 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// Found symbol. Return references to rename.
|
|
|
|
std::vector<QueryLocation> uses = GetUsesOfSymbol(db, ref.idx);
|
2017-09-22 01:14:57 +00:00
|
|
|
response.result =
|
|
|
|
BuildWorkspaceEdit(db, working_files, uses, msg->params.newName);
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-03 02:21:21 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentRename, response);
|
2017-04-03 02:21:21 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-15 05:14:05 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentCompletion: {
|
2017-06-29 04:59:16 +00:00
|
|
|
auto msg = std::shared_ptr<Ipc_TextDocumentComplete>(
|
|
|
|
static_cast<Ipc_TextDocumentComplete*>(message.release()));
|
2017-04-15 05:14:05 +00:00
|
|
|
|
2017-05-27 04:21:00 +00:00
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
2017-05-26 07:10:55 +00:00
|
|
|
WorkingFile* file = working_files->GetFileByFilename(path);
|
2017-05-20 08:07:29 +00:00
|
|
|
|
2017-05-27 06:51:32 +00:00
|
|
|
// It shouldn't be possible, but sometimes vscode will send queries out
|
|
|
|
        // of order, ie, we get a completion request before the buffer content update.
|
|
|
|
std::string buffer_line;
|
2017-09-22 01:14:57 +00:00
|
|
|
        if (file && msg->params.position.line >= 0 &&
|
|
|
|
            msg->params.position.line < (int)file->all_buffer_lines.size())
|
2017-05-27 06:51:32 +00:00
|
|
|
buffer_line = file->all_buffer_lines[msg->params.position.line];
|
2017-05-21 07:37:53 +00:00
|
|
|
|
|
|
|
if (ShouldRunIncludeCompletion(buffer_line)) {
|
2017-05-20 06:35:14 +00:00
|
|
|
Out_TextDocumentComplete complete_response;
|
|
|
|
complete_response.id = msg->id;
|
2017-05-21 21:01:52 +00:00
|
|
|
|
2017-05-21 19:51:15 +00:00
|
|
|
{
|
2017-09-22 01:14:57 +00:00
|
|
|
std::unique_lock<std::mutex> lock(
|
|
|
|
include_complete->completion_items_mutex, std::defer_lock);
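            // The deferred lock is only acquired while the include scanner is
            // still populating |completion_items|; once scanning finishes the
            // list is assumed stable and is read without locking.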
|
2017-05-27 04:21:00 +00:00
|
|
|
if (include_complete->is_scanning)
|
2017-05-21 19:51:15 +00:00
|
|
|
lock.lock();
|
|
|
|
complete_response.result.items.assign(
|
2017-09-22 01:14:57 +00:00
|
|
|
include_complete->completion_items.begin(),
|
|
|
|
include_complete->completion_items.end());
|
2017-05-21 19:51:15 +00:00
|
|
|
if (lock)
|
|
|
|
lock.unlock();
|
|
|
|
|
|
|
|
// Update textEdit params.
|
|
|
|
for (lsCompletionItem& item : complete_response.result.items) {
|
2017-05-27 04:21:00 +00:00
|
|
|
item.textEdit->range.start.line = msg->params.position.line;
|
2017-05-21 19:51:15 +00:00
|
|
|
item.textEdit->range.start.character = 0;
|
2017-05-27 04:21:00 +00:00
|
|
|
item.textEdit->range.end.line = msg->params.position.line;
|
2017-05-21 19:51:15 +00:00
|
|
|
item.textEdit->range.end.character = (int)buffer_line.size();
|
2017-05-21 07:37:53 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-06-16 02:28:49 +00:00
|
|
|
FilterCompletionResponse(&complete_response, buffer_line);
|
2017-09-22 01:14:57 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentCompletion,
|
|
|
|
complete_response);
|
|
|
|
} else {
|
2017-05-26 07:10:55 +00:00
|
|
|
bool is_global_completion = false;
|
2017-06-16 02:28:49 +00:00
|
|
|
std::string existing_completion;
|
|
|
|
if (file) {
|
2017-09-22 01:14:57 +00:00
|
|
|
msg->params.position = file->FindStableCompletionSource(
|
|
|
|
msg->params.position, &is_global_completion,
|
|
|
|
&existing_completion);
|
2017-06-16 02:28:49 +00:00
|
|
|
}
|
|
|
|
|
2017-06-09 07:08:06 +00:00
|
|
|
ClangCompleteManager::OnComplete callback = std::bind(
|
2017-11-17 21:31:48 +00:00
|
|
|
[global_code_complete_cache, non_global_code_complete_cache,
|
|
|
|
is_global_completion, existing_completion,
|
2017-09-22 01:14:57 +00:00
|
|
|
msg](const NonElidedVector<lsCompletionItem>& results,
|
|
|
|
bool is_cached_result) {
|
|
|
|
|
|
|
|
Out_TextDocumentComplete complete_response;
|
|
|
|
complete_response.id = msg->id;
|
|
|
|
complete_response.result.items = results;
|
|
|
|
|
|
|
|
// Emit completion results.
|
|
|
|
FilterCompletionResponse(&complete_response,
|
|
|
|
existing_completion);
|
|
|
|
IpcManager::instance()->SendOutMessageToClient(
|
|
|
|
IpcId::TextDocumentCompletion, complete_response);
|
|
|
|
|
|
|
|
// Cache completion results.
|
|
|
|
if (!is_cached_result) {
|
|
|
|
std::string path = msg->params.textDocument.uri.GetPath();
|
|
|
|
if (is_global_completion) {
|
|
|
|
global_code_complete_cache->WithLock([&]() {
|
|
|
|
global_code_complete_cache->cached_path_ = path;
|
|
|
|
global_code_complete_cache->cached_results_ = results;
|
|
|
|
});
|
|
|
|
} else {
|
|
|
|
non_global_code_complete_cache->WithLock([&]() {
|
|
|
|
non_global_code_complete_cache->cached_path_ = path;
|
|
|
|
non_global_code_complete_cache
|
|
|
|
->cached_completion_position_ = msg->params.position;
|
|
|
|
non_global_code_complete_cache->cached_results_ = results;
|
|
|
|
});
|
|
|
|
}
|
|
|
|
}
|
|
|
|
},
|
|
|
|
std::placeholders::_1, std::placeholders::_2);
|
2017-05-21 07:37:53 +00:00
|
|
|
|
2017-06-30 06:51:22 +00:00
|
|
|
bool is_cache_match = false;
|
|
|
|
global_code_complete_cache->WithLock([&]() {
|
2017-09-22 01:14:57 +00:00
|
|
|
is_cache_match =
|
|
|
|
is_global_completion &&
|
|
|
|
global_code_complete_cache->cached_path_ == path &&
|
|
|
|
!global_code_complete_cache->cached_results_.empty();
|
2017-06-30 06:51:22 +00:00
|
|
|
});
|
|
|
|
if (is_cache_match) {
|
2017-06-10 04:13:16 +00:00
|
|
|
ClangCompleteManager::OnComplete freshen_global =
|
2017-09-22 01:14:57 +00:00
|
|
|
[global_code_complete_cache](
|
|
|
|
NonElidedVector<lsCompletionItem> results,
|
|
|
|
bool is_cached_result) {
|
|
|
|
|
|
|
|
assert(!is_cached_result);
|
|
|
|
|
|
|
|
// note: path is updated in the normal completion handler.
|
|
|
|
global_code_complete_cache->WithLock([&]() {
|
|
|
|
global_code_complete_cache->cached_results_ = results;
|
|
|
|
});
|
|
|
|
};
|
2017-05-26 07:10:55 +00:00
|
|
|
|
2017-06-30 06:51:22 +00:00
|
|
|
global_code_complete_cache->WithLock([&]() {
|
2017-09-22 01:14:57 +00:00
|
|
|
callback(global_code_complete_cache->cached_results_,
|
|
|
|
true /*is_cached_result*/);
|
2017-06-30 06:51:22 +00:00
|
|
|
});
|
|
|
|
clang_complete->CodeComplete(msg->params, freshen_global);
|
2017-09-22 01:14:57 +00:00
|
|
|
} else if (non_global_code_complete_cache->IsCacheValid(
|
|
|
|
msg->params)) {
|
2017-06-30 06:51:22 +00:00
|
|
|
non_global_code_complete_cache->WithLock([&]() {
|
2017-09-22 01:14:57 +00:00
|
|
|
callback(non_global_code_complete_cache->cached_results_,
|
|
|
|
true /*is_cached_result*/);
|
2017-06-30 06:51:22 +00:00
|
|
|
});
|
2017-09-22 01:14:57 +00:00
|
|
|
} else {
|
2017-06-30 06:51:22 +00:00
|
|
|
clang_complete->CodeComplete(msg->params, callback);
|
2017-05-21 07:37:53 +00:00
|
|
|
}
|
2017-05-20 08:07:29 +00:00
|
|
|
}
|
2017-04-14 06:43:50 +00:00
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
2017-04-19 07:52:48 +00:00
|
|
|
|
2017-05-15 07:28:53 +00:00
|
|
|
case IpcId::TextDocumentSignatureHelp: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentSignatureHelp>();
|
2017-05-20 08:20:37 +00:00
|
|
|
lsTextDocumentPositionParams& params = msg->params;
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* file =
|
|
|
|
working_files->GetFileByFilename(params.textDocument.uri.GetPath());
|
2017-05-15 07:28:53 +00:00
|
|
|
std::string search;
|
|
|
|
int active_param = 0;
|
|
|
|
if (file) {
|
|
|
|
lsPosition completion_position;
|
2017-09-22 01:14:57 +00:00
|
|
|
search = file->FindClosestCallNameInBuffer(
|
|
|
|
params.position, &active_param, &completion_position);
|
2017-05-15 07:28:53 +00:00
|
|
|
params.position = completion_position;
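            // Completing at the call's opening parenthesis means clang reports
            // the overload set for |search|; the callback below filters the
            // results by label to keep only matching signatures.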
|
|
|
|
}
|
|
|
|
if (search.empty())
|
|
|
|
break;
|
|
|
|
|
2017-06-30 06:51:22 +00:00
|
|
|
ClangCompleteManager::OnComplete callback = std::bind(
|
2017-09-22 01:14:57 +00:00
|
|
|
[signature_cache](BaseIpcMessage* message, std::string search,
|
|
|
|
int active_param,
|
|
|
|
const NonElidedVector<lsCompletionItem>& results,
|
|
|
|
bool is_cached_result) {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentSignatureHelp>();
|
2017-09-22 01:14:57 +00:00
|
|
|
auto ipc = IpcManager::instance();
|
|
|
|
|
|
|
|
Out_TextDocumentSignatureHelp response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
|
|
|
for (auto& result : results) {
|
|
|
|
if (result.label != search)
|
|
|
|
continue;
|
2017-05-15 07:28:53 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
lsSignatureInformation signature;
|
|
|
|
signature.label = result.detail;
|
|
|
|
for (auto& parameter : result.parameters_) {
|
|
|
|
lsParameterInformation ls_param;
|
|
|
|
ls_param.label = parameter;
|
|
|
|
signature.parameters.push_back(ls_param);
|
|
|
|
}
|
|
|
|
response.result.signatures.push_back(signature);
|
|
|
|
}
|
2017-05-15 07:28:53 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// Guess the signature the user wants based on available parameter
|
|
|
|
// count.
|
|
|
|
response.result.activeSignature = 0;
|
|
|
|
for (size_t i = 0; i < response.result.signatures.size(); ++i) {
|
|
|
|
              if (active_param < (int)response.result.signatures[i].parameters.size()) {
|
|
|
|
response.result.activeSignature = (int)i;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2017-05-15 07:28:53 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// Set signature to what we parsed from the working file.
|
|
|
|
response.result.activeParameter = active_param;
|
|
|
|
|
|
|
|
Timer timer;
|
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentSignatureHelp,
|
|
|
|
response);
|
|
|
|
|
|
|
|
if (!is_cached_result) {
|
|
|
|
signature_cache->WithLock([&]() {
|
|
|
|
signature_cache->cached_path_ =
|
|
|
|
msg->params.textDocument.uri.GetPath();
|
|
|
|
signature_cache->cached_completion_position_ =
|
|
|
|
msg->params.position;
|
|
|
|
signature_cache->cached_results_ = results;
|
|
|
|
});
|
|
|
|
}
|
2017-05-20 08:20:37 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
delete message;
|
|
|
|
},
|
|
|
|
message.release(), search, active_param, std::placeholders::_1,
|
|
|
|
std::placeholders::_2);
|
2017-05-15 07:28:53 +00:00
|
|
|
|
2017-05-20 08:23:01 +00:00
|
|
|
if (signature_cache->IsCacheValid(params)) {
|
2017-06-30 06:51:22 +00:00
|
|
|
signature_cache->WithLock([&]() {
|
2017-09-22 01:14:57 +00:00
|
|
|
callback(signature_cache->cached_results_,
|
|
|
|
true /*is_cached_result*/);
|
2017-06-30 06:51:22 +00:00
|
|
|
});
|
2017-09-22 01:14:57 +00:00
|
|
|
} else {
|
2017-05-27 04:21:00 +00:00
|
|
|
clang_complete->CodeComplete(params, std::move(callback));
|
2017-05-20 08:20:37 +00:00
|
|
|
}
|
2017-05-15 07:28:53 +00:00
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDefinition: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDefinition>();
|
2017-04-14 06:43:50 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
QueryFileId file_id;
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file, &file_id))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-04-15 05:14:05 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_TextDocumentDefinition response;
|
|
|
|
response.id = msg->id;
|
2017-04-15 05:14:05 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
int target_line = msg->params.position.line + 1;
|
|
|
|
int target_column = msg->params.position.character + 1;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// Found symbol. Return definition.
|
|
|
|
|
|
|
|
// Special cases which are handled:
|
|
|
|
// - symbol has declaration but no definition (ie, pure virtual)
|
|
|
|
// - start at spelling but end at extent for better mouse tooltip
|
|
|
|
// - goto declaration while in definition of recursive type
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<QueryLocation> def_loc =
|
|
|
|
GetDefinitionSpellingOfSymbol(db, ref.idx);
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
// We use spelling start and extent end because this causes vscode to
|
|
|
|
        // highlight the entire definition when previewing / hovering with the
|
|
|
|
// mouse.
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<QueryLocation> def_extent =
|
|
|
|
GetDefinitionExtentOfSymbol(db, ref.idx);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (def_loc && def_extent)
|
|
|
|
def_loc->range.end = def_extent->range.end;
|
|
|
|
|
|
|
|
// If the cursor is currently at or in the definition we should goto
|
|
|
|
// the declaration if possible. We also want to use declarations if
|
|
|
|
// we're pointing to, ie, a pure virtual function which has no
|
|
|
|
// definition.
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!def_loc ||
|
|
|
|
(def_loc->path == file_id &&
|
|
|
|
def_loc->range.Contains(target_line, target_column))) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// Goto declaration.
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryLocation> declarations =
|
|
|
|
GetDeclarationsOfSymbolForGotoDefinition(db, ref.idx);
|
2017-04-23 20:19:09 +00:00
|
|
|
for (auto declaration : declarations) {
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsLocation> ls_declaration =
|
|
|
|
GetLsLocation(db, working_files, declaration);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (ls_declaration)
|
|
|
|
response.result.push_back(*ls_declaration);
|
|
|
|
}
|
2017-09-22 01:14:57 +00:00
|
|
|
// We found some declarations. Break so we don't add the definition
|
|
|
|
// location.
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!response.result.empty())
|
|
|
|
break;
|
|
|
|
}
|
2017-04-14 06:43:50 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
if (def_loc) {
|
|
|
|
PushBack(&response.result,
|
|
|
|
GetLsLocation(db, working_files, *def_loc));
|
|
|
|
}
|
2017-04-14 06:43:50 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!response.result.empty())
|
|
|
|
break;
|
|
|
|
}
|
2017-04-14 05:18:02 +00:00
|
|
|
|
2017-05-21 03:46:15 +00:00
|
|
|
// No symbols - check for includes.
|
|
|
|
if (response.result.empty()) {
|
2017-10-17 05:44:58 +00:00
|
|
|
for (const IndexInclude& include : file->def->includes) {
|
2017-05-21 03:46:15 +00:00
|
|
|
if (include.line == target_line) {
|
|
|
|
lsLocation result;
|
|
|
|
result.uri = lsDocumentUri::FromPath(include.resolved_path);
|
|
|
|
response.result.push_back(result);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentDefinition, response);
|
2017-04-14 05:18:02 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-19 07:52:48 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDocumentHighlight: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDocumentHighlight>();
|
2017-04-14 05:18:02 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
QueryFileId file_id;
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file, &file_id))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-04-14 05:18:02 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_TextDocumentDocumentHighlight response;
|
|
|
|
response.id = msg->id;
|
2017-04-14 05:18:02 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// Found symbol. Return references to highlight.
|
|
|
|
std::vector<QueryLocation> uses = GetUsesOfSymbol(db, ref.idx);
|
|
|
|
response.result.reserve(uses.size());
|
|
|
|
for (const QueryLocation& use : uses) {
|
|
|
|
if (use.path != file_id)
|
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsLocation> ls_location =
|
|
|
|
GetLsLocation(db, working_files, use);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!ls_location)
|
|
|
|
continue;
|
2017-04-14 05:18:02 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
lsDocumentHighlight highlight;
|
|
|
|
highlight.kind = lsDocumentHighlightKind::Text;
|
|
|
|
highlight.range = ls_location->range;
|
|
|
|
response.result.push_back(highlight);
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2017-04-10 05:34:06 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentDocumentHighlight,
|
|
|
|
response);
|
2017-04-10 05:34:06 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentHover: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentHover>();
|
2017-04-10 05:34:06 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
Out_TextDocumentHover response;
|
|
|
|
response.id = msg->id;
|
2017-04-10 05:34:06 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// Found symbol. Return hover.
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsRange> ls_range = GetLsRange(
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path), ref.loc.range);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!ls_range)
|
2017-04-15 05:14:05 +00:00
|
|
|
continue;
|
2017-04-10 05:34:06 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
response.result.contents = GetHoverForSymbol(db, ref.idx);
|
|
|
|
response.result.range = *ls_range;
|
|
|
|
break;
|
2017-04-10 05:34:06 +00:00
|
|
|
}
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentHover, response);
|
2017-04-15 05:14:05 +00:00
|
|
|
break;
|
2017-04-10 05:34:06 +00:00
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentReferences: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentReferences>();
|
2017-04-23 20:19:09 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file =
|
2017-10-17 05:44:58 +00:00
|
|
|
working_files->GetFileByFilename(file->def->path);
|
2017-04-10 05:34:06 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_TextDocumentReferences response;
|
|
|
|
response.id = msg->id;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const SymbolRef& ref :
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.position)) {
|
2017-04-23 20:19:09 +00:00
|
|
|
optional<QueryLocation> excluded_declaration;
|
|
|
|
if (!msg->params.context.includeDeclaration) {
|
2017-07-28 02:14:33 +00:00
|
|
|
LOG_S(INFO) << "Excluding declaration in references";
|
2017-04-23 20:19:09 +00:00
|
|
|
excluded_declaration = GetDefinitionSpellingOfSymbol(db, ref.idx);
|
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
// Found symbol. Return references.
|
|
|
|
std::vector<QueryLocation> uses = GetUsesOfSymbol(db, ref.idx);
|
|
|
|
response.result.reserve(uses.size());
|
|
|
|
for (const QueryLocation& use : uses) {
|
2017-09-22 01:14:57 +00:00
|
|
|
if (excluded_declaration.has_value() &&
|
|
|
|
use == *excluded_declaration)
|
2017-04-23 20:19:09 +00:00
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsLocation> ls_location =
|
|
|
|
GetLsLocation(db, working_files, use);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (ls_location)
|
|
|
|
response.result.push_back(*ls_location);
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentReferences, response);
|
2017-03-29 06:33:38 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentDocumentSymbol: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDocumentSymbol>();
|
2017-03-06 08:48:51 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_TextDocumentDocumentSymbol response;
|
|
|
|
response.id = msg->id;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
for (SymbolRef ref : file->def->outline) {
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsSymbolInformation> info =
|
|
|
|
GetSymbolInfo(db, working_files, ref.idx);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!info)
|
|
|
|
continue;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsLocation> location =
|
|
|
|
GetLsLocation(db, working_files, ref.loc);
|
2017-04-23 20:19:09 +00:00
|
|
|
if (!location)
|
|
|
|
continue;
|
|
|
|
info->location = *location;
|
|
|
|
response.result.push_back(*info);
|
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentDocumentSymbol,
|
|
|
|
response);
|
2017-03-29 06:33:38 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-04-23 20:19:09 +00:00
|
|
|
|
2017-05-21 04:30:59 +00:00
|
|
|
case IpcId::TextDocumentDocumentLink: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentDocumentLink>();
|
2017-05-21 04:30:59 +00:00
|
|
|
|
|
|
|
Out_TextDocumentDocumentLink response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
|
|
|
if (config->showDocumentLinksOnIncludes) {
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id,
|
|
|
|
msg->params.textDocument.uri.GetPath(), &file))
|
2017-05-21 04:30:59 +00:00
|
|
|
break;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file = working_files->GetFileByFilename(
|
|
|
|
msg->params.textDocument.uri.GetPath());
|
2017-05-21 04:30:59 +00:00
|
|
|
if (!working_file) {
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_S(INFO) << "Unable to find working file "
|
|
|
|
<< msg->params.textDocument.uri.GetPath();
|
2017-05-21 04:30:59 +00:00
|
|
|
break;
|
|
|
|
}
|
2017-10-17 05:44:58 +00:00
|
|
|
for (const IndexInclude& include : file->def->includes) {
|
2017-05-21 04:30:59 +00:00
|
|
|
optional<int> buffer_line;
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<std::string> buffer_line_content =
|
|
|
|
working_file->GetBufferLineContentFromIndexLine(include.line,
|
|
|
|
&buffer_line);
|
2017-05-21 04:30:59 +00:00
|
|
|
if (!buffer_line || !buffer_line_content)
|
|
|
|
continue;
|
|
|
|
|
2017-05-26 04:26:35 +00:00
|
|
|
// Subtract 1 from line because querydb stores 1-based lines but
|
|
|
|
// vscode expects 0-based lines.
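// For example, an include the indexer recorded on line 12 is reported
// back to the client as line 11.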
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsRange> between_quotes =
|
|
|
|
ExtractQuotedRange(*buffer_line - 1, *buffer_line_content);
|
2017-05-26 04:26:35 +00:00
|
|
|
if (!between_quotes)
|
2017-05-21 04:30:59 +00:00
|
|
|
continue;
|
|
|
|
|
|
|
|
lsDocumentLink link;
|
|
|
|
link.target = lsDocumentUri::FromPath(include.resolved_path);
|
2017-05-26 04:26:35 +00:00
|
|
|
link.range = *between_quotes;
|
2017-05-21 04:30:59 +00:00
|
|
|
response.result.push_back(link);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentDocumentLink, response);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-05-20 19:31:07 +00:00
|
|
|
case IpcId::TextDocumentCodeAction: {
|
|
|
|
// NOTE: This code snippet will generate some FixIts for testing:
|
|
|
|
//
|
|
|
|
// struct point { float x, y; };
|
|
|
|
// void foo() {
|
|
|
|
// point origin = {
|
|
|
|
// x: 0.0,
|
|
|
|
// y: 0.0
|
|
|
|
// };
|
|
|
|
// }
|
|
|
|
//
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentCodeAction>();
|
2017-05-20 19:31:07 +00:00
|
|
|
|
2017-05-29 21:18:35 +00:00
|
|
|
QueryFileId file_id;
|
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file, &file_id))
|
2017-05-29 21:18:35 +00:00
|
|
|
break;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
WorkingFile* working_file = working_files->GetFileByFilename(
|
|
|
|
msg->params.textDocument.uri.GetPath());
|
2017-05-20 19:31:07 +00:00
|
|
|
if (!working_file) {
|
|
|
|
// TODO: send error response.
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_S(INFO)
|
|
|
|
<< "[error] textDocument/codeAction could not find working file";
|
2017-05-20 19:31:07 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
Out_TextDocumentCodeAction response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
2017-05-29 21:18:35 +00:00
|
|
|
// TODO: auto-insert namespace?
|
|
|
|
|
|
|
|
int default_line = (int)working_file->all_buffer_lines.size();
|
|
|
|
|
|
|
|
// Make sure to call EnsureImplFile before using these. We lazy load
|
|
|
|
// them because computing the values could involve an entire project
|
|
|
|
// scan.
|
|
|
|
optional<lsDocumentUri> impl_uri;
|
|
|
|
optional<QueryFileId> impl_file_id;
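// A short usage sketch mirroring the calls further below; the two
// optionals above are only safe to dereference after this runs:
//
//   EnsureImplFile(db, file_id, impl_uri /*out*/, impl_file_id /*out*/);
//   command.arguments.textDocumentUri = *impl_uri;  // now valid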
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<SymbolRef> syms =
|
|
|
|
FindSymbolsAtLocation(working_file, file, msg->params.range.start);
|
2017-05-29 21:18:35 +00:00
|
|
|
for (SymbolRef sym : syms) {
|
|
|
|
switch (sym.idx.kind) {
|
|
|
|
case SymbolKind::Type: {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryType& type = db->types[sym.idx.idx];
|
|
|
|
if (!type.def)
|
2017-05-29 21:18:35 +00:00
|
|
|
break;
|
|
|
|
|
|
|
|
int num_edits = 0;
|
|
|
|
|
|
|
|
// Get implementation file.
|
|
|
|
Out_TextDocumentCodeAction::Command command;
|
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
for (QueryFuncId func_id : type.def->funcs) {
|
|
|
|
QueryFunc& func_def = db->funcs[func_id.id];
|
|
|
|
if (!func_def.def || func_def.def->definition_extent)
|
2017-05-29 21:18:35 +00:00
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
EnsureImplFile(db, file_id, impl_uri /*out*/,
|
|
|
|
impl_file_id /*out*/);
|
|
|
|
optional<lsTextEdit> edit = BuildAutoImplementForFunction(
|
|
|
|
db, working_files, working_file, default_line, file_id,
|
2017-10-17 05:44:58 +00:00
|
|
|
*impl_file_id, func_def);
|
2017-05-29 21:18:35 +00:00
|
|
|
if (!edit)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
++num_edits;
|
|
|
|
|
|
|
|
// Merge edits together if they are on the same line.
|
|
|
|
// TODO: be smarter about newline merging? ie, don't end up
|
|
|
|
// with foo()\n\n\n\nfoo(), we want foo()\n\nfoo()\n\n
|
|
|
|
//
|
|
|
|
if (!command.arguments.edits.empty() &&
|
2017-09-22 01:14:57 +00:00
|
|
|
command.arguments.edits[command.arguments.edits.size() - 1]
|
|
|
|
.range.end.line == edit->range.start.line) {
|
|
|
|
command.arguments.edits[command.arguments.edits.size() - 1]
|
|
|
|
.newText += edit->newText;
|
|
|
|
} else {
|
2017-05-29 21:18:35 +00:00
|
|
|
command.arguments.edits.push_back(*edit);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (command.arguments.edits.empty())
|
|
|
|
break;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// If we're inserting at the end of the document, put a newline
|
|
|
|
// before the insertion.
|
2017-05-29 21:18:35 +00:00
|
|
|
if (command.arguments.edits[0].range.start.line >= default_line)
|
|
|
|
command.arguments.edits[0].newText.insert(0, "\n");
|
|
|
|
|
|
|
|
command.arguments.textDocumentUri = *impl_uri;
|
2017-09-22 01:14:57 +00:00
|
|
|
command.title = "Auto-Implement " + std::to_string(num_edits) +
|
2017-10-17 05:44:58 +00:00
|
|
|
" methods on " + type.def->short_name;
|
2017-05-29 21:18:35 +00:00
|
|
|
command.command = "cquery._autoImplement";
|
|
|
|
response.result.push_back(command);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case SymbolKind::Func: {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFunc& func = db->funcs[sym.idx.idx];
|
|
|
|
if (!func.def || func.def->definition_extent)
|
2017-05-29 21:18:35 +00:00
|
|
|
break;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
EnsureImplFile(db, file_id, impl_uri /*out*/,
|
|
|
|
impl_file_id /*out*/);
|
2017-05-29 21:18:35 +00:00
|
|
|
|
|
|
|
// Get implementation file.
|
|
|
|
Out_TextDocumentCodeAction::Command command;
|
2017-10-17 05:44:58 +00:00
|
|
|
command.title = "Auto-Implement " + func.def->short_name;
|
2017-05-29 21:18:35 +00:00
|
|
|
command.command = "cquery._autoImplement";
|
|
|
|
command.arguments.textDocumentUri = *impl_uri;
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsTextEdit> edit = BuildAutoImplementForFunction(
|
|
|
|
db, working_files, working_file, default_line, file_id,
|
2017-10-17 05:44:58 +00:00
|
|
|
*impl_file_id, func);
|
2017-05-29 21:18:35 +00:00
|
|
|
if (!edit)
|
|
|
|
break;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// If we're inserting at the end of the document, put a newline
|
|
|
|
// before the insertion.
|
2017-05-29 21:18:35 +00:00
|
|
|
if (edit->range.start.line >= default_line)
|
|
|
|
edit->newText.insert(0, "\n");
|
|
|
|
command.arguments.edits.push_back(*edit);
|
|
|
|
response.result.push_back(command);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Only show one auto-impl section.
|
|
|
|
if (!response.result.empty())
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-06-14 06:29:41 +00:00
|
|
|
std::vector<lsDiagnostic> diagnostics;
|
2017-09-22 01:14:57 +00:00
|
|
|
working_files->DoAction(
|
|
|
|
[&]() { diagnostics = working_file->diagnostics_; });
|
2017-06-14 06:29:41 +00:00
|
|
|
for (lsDiagnostic& diag : diagnostics) {
|
2017-05-29 23:57:19 +00:00
|
|
|
if (diag.range.start.line != msg->params.range.start.line)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
// For error diagnostics, provide an action to resolve an include.
|
|
|
|
// TODO: find a way to index diagnostic contents so line numbers
|
|
|
|
// don't get mismatched when actively editing a file.
|
2017-09-22 01:14:57 +00:00
|
|
|
std::string include_query =
|
|
|
|
LexWordAroundPos(diag.range.start, working_file->buffer_content);
|
|
|
|
if (diag.severity == lsDiagnosticSeverity::Error &&
|
|
|
|
!include_query.empty()) {
|
2017-05-29 23:57:19 +00:00
|
|
|
const size_t kMaxResults = 20;
|
|
|
|
|
|
|
|
std::unordered_set<std::string> include_absolute_paths;
|
|
|
|
|
|
|
|
// Find include candidate strings.
|
|
|
|
for (int i = 0; i < db->detailed_names.size(); ++i) {
|
|
|
|
if (include_absolute_paths.size() > kMaxResults)
|
|
|
|
break;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (db->detailed_names[i].find(include_query) ==
|
|
|
|
std::string::npos)
|
2017-05-29 23:57:19 +00:00
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<QueryFileId> decl_file_id =
|
|
|
|
GetDeclarationFileForSymbol(db, db->symbols[i]);
|
2017-05-29 23:57:19 +00:00
|
|
|
if (!decl_file_id)
|
|
|
|
continue;
|
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFile& decl_file = db->files[decl_file_id->id];
|
|
|
|
if (!decl_file.def)
|
2017-05-29 23:57:19 +00:00
|
|
|
continue;
|
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
include_absolute_paths.insert(decl_file.def->path);
|
2017-05-29 23:57:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Build include strings.
|
2017-06-14 06:09:25 +00:00
|
|
|
std::unordered_set<std::string> include_insert_strings;
|
2017-05-29 23:57:19 +00:00
|
|
|
include_insert_strings.reserve(include_absolute_paths.size());
|
|
|
|
|
|
|
|
for (const std::string& path : include_absolute_paths) {
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<lsCompletionItem> item =
|
|
|
|
include_complete->FindCompletionItemForAbsolutePath(path);
|
2017-05-29 23:57:19 +00:00
|
|
|
if (!item)
|
|
|
|
continue;
|
|
|
|
if (item->textEdit)
|
2017-06-14 06:09:25 +00:00
|
|
|
include_insert_strings.insert(item->textEdit->newText);
|
2017-05-29 23:57:19 +00:00
|
|
|
else if (!item->insertText.empty())
|
2017-06-14 06:09:25 +00:00
|
|
|
include_insert_strings.insert(item->insertText);
|
2017-05-29 23:57:19 +00:00
|
|
|
else
|
2017-09-22 01:14:57 +00:00
|
|
|
assert(false &&
|
|
|
|
"unable to determine insert string for include "
|
|
|
|
"completion item");
|
2017-05-29 23:57:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Build code action.
|
|
|
|
if (!include_insert_strings.empty()) {
|
|
|
|
Out_TextDocumentCodeAction::Command command;
|
|
|
|
|
|
|
|
// Build edits.
|
2017-09-22 01:14:57 +00:00
|
|
|
for (const std::string& include_insert_string :
|
|
|
|
include_insert_strings) {
|
2017-05-29 23:57:19 +00:00
|
|
|
lsTextEdit edit;
|
2017-09-22 01:14:57 +00:00
|
|
|
optional<int> include_line = FindIncludeLine(
|
|
|
|
working_file->all_buffer_lines, include_insert_string);
|
2017-05-29 23:57:19 +00:00
|
|
|
if (!include_line)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
edit.range.start.line = *include_line;
|
|
|
|
edit.range.end.line = *include_line;
|
|
|
|
edit.newText = include_insert_string + "\n";
|
|
|
|
command.arguments.edits.push_back(edit);
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set up metadata and send to client.
|
|
|
|
if (include_insert_strings.size() == 1)
|
2017-06-14 06:09:25 +00:00
|
|
|
command.title = "Insert " + *include_insert_strings.begin();
|
2017-05-29 23:57:19 +00:00
|
|
|
else
|
2017-09-22 01:14:57 +00:00
|
|
|
command.title = "Pick one of " +
|
|
|
|
std::to_string(command.arguments.edits.size()) +
|
|
|
|
" includes to insert";
|
2017-05-29 23:57:19 +00:00
|
|
|
command.command = "cquery._insertInclude";
|
|
|
|
command.arguments.textDocumentUri = msg->params.textDocument.uri;
|
|
|
|
response.result.push_back(command);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// clang does not provide accurate enough column reporting for
|
2017-05-20 19:31:07 +00:00
|
|
|
// diagnostics to do good column filtering, so report all
|
|
|
|
// diagnostics on the line.
|
2017-05-29 23:57:19 +00:00
|
|
|
if (!diag.fixits_.empty()) {
|
2017-05-20 19:31:07 +00:00
|
|
|
Out_TextDocumentCodeAction::Command command;
|
|
|
|
command.title = "FixIt: " + diag.message;
|
|
|
|
command.command = "cquery._applyFixIt";
|
|
|
|
command.arguments.textDocumentUri = msg->params.textDocument.uri;
|
|
|
|
command.arguments.edits = diag.fixits_;
|
|
|
|
response.result.push_back(command);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentCodeAction, response);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::TextDocumentCodeLens: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_TextDocumentCodeLens>();
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
Out_TextDocumentCodeLens response;
|
|
|
|
response.id = msg->id;
|
|
|
|
|
|
|
|
lsDocumentUri file_as_uri = msg->params.textDocument.uri;
|
2017-05-26 06:40:38 +00:00
|
|
|
std::string path = file_as_uri.GetPath();
|
|
|
|
|
2017-05-27 04:21:00 +00:00
|
|
|
clang_complete->NotifyView(path);
|
2017-04-23 20:19:09 +00:00
|
|
|
|
2017-05-27 07:10:21 +00:00
|
|
|
QueryFile* file;
|
2017-09-22 01:14:57 +00:00
|
|
|
if (!FindFileOrFail(db, msg->id, msg->params.textDocument.uri.GetPath(),
|
|
|
|
&file))
|
2017-03-29 06:33:38 +00:00
|
|
|
break;
|
2017-05-27 07:10:21 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
CommonCodeLensParams common;
|
|
|
|
common.result = &response.result;
|
|
|
|
common.db = db;
|
|
|
|
common.working_files = working_files;
|
2017-10-17 05:44:58 +00:00
|
|
|
common.working_file = working_files->GetFileByFilename(file->def->path);
|
2017-04-23 20:19:09 +00:00
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
for (SymbolRef ref : file->def->outline) {
|
2017-04-23 20:19:09 +00:00
|
|
|
// NOTE: We OffsetColumn so that the code lenses always show up in a
|
|
|
|
// predictable order; otherwise the client may display them arbitrarily.
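// For example, on a type we emit "refs" at column offset +0, "derived" at
// +1 and "vars" at +2, so clients that sort lenses by position always
// display them in that order.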
|
|
|
|
|
|
|
|
SymbolIdx symbol = ref.idx;
|
|
|
|
switch (symbol.kind) {
|
2017-09-22 01:14:57 +00:00
|
|
|
case SymbolKind::Type: {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryType& type = db->types[symbol.idx];
|
|
|
|
if (!type.def)
|
2017-09-22 01:14:57 +00:00
|
|
|
continue;
|
|
|
|
AddCodeLens("ref", "refs", &common, ref.loc.OffsetStartColumn(0),
|
2017-10-17 05:44:58 +00:00
|
|
|
type.uses, type.def->definition_spelling,
|
2017-09-22 01:14:57 +00:00
|
|
|
true /*force_display*/);
|
|
|
|
AddCodeLens("derived", "derived", &common,
|
|
|
|
ref.loc.OffsetStartColumn(1),
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, type.derived), nullopt,
|
2017-09-22 01:14:57 +00:00
|
|
|
false /*force_display*/);
|
|
|
|
AddCodeLens("var", "vars", &common, ref.loc.OffsetStartColumn(2),
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, type.instances), nullopt,
|
2017-09-22 01:14:57 +00:00
|
|
|
false /*force_display*/);
|
|
|
|
break;
|
2017-04-23 20:19:09 +00:00
|
|
|
}
|
2017-09-22 01:14:57 +00:00
|
|
|
case SymbolKind::Func: {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryFunc& func = db->funcs[symbol.idx];
|
|
|
|
if (!func.def)
|
2017-09-22 01:14:57 +00:00
|
|
|
continue;
|
2017-04-11 07:29:36 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
int16_t offset = 0;
|
|
|
|
|
|
|
|
std::vector<QueryFuncRef> base_callers =
|
2017-10-17 05:44:58 +00:00
|
|
|
GetCallersForAllBaseFunctions(db, func);
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<QueryFuncRef> derived_callers =
|
2017-10-17 05:44:58 +00:00
|
|
|
GetCallersForAllDerivedFunctions(db, func);
|
2017-09-22 01:14:57 +00:00
|
|
|
if (base_callers.empty() && derived_callers.empty()) {
|
|
|
|
AddCodeLens("call", "calls", &common,
|
|
|
|
ref.loc.OffsetStartColumn(offset++),
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, func.callers), nullopt,
|
2017-09-22 01:14:57 +00:00
|
|
|
true /*force_display*/);
|
|
|
|
} else {
|
|
|
|
AddCodeLens("direct call", "direct calls", &common,
|
|
|
|
ref.loc.OffsetStartColumn(offset++),
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, func.callers), nullopt,
|
2017-09-22 01:14:57 +00:00
|
|
|
false /*force_display*/);
|
|
|
|
if (!base_callers.empty())
|
|
|
|
AddCodeLens("base call", "base calls", &common,
|
|
|
|
ref.loc.OffsetStartColumn(offset++),
|
|
|
|
ToQueryLocation(db, base_callers), nullopt,
|
|
|
|
false /*force_display*/);
|
|
|
|
if (!derived_callers.empty())
|
|
|
|
AddCodeLens("derived call", "derived calls", &common,
|
|
|
|
ref.loc.OffsetStartColumn(offset++),
|
|
|
|
ToQueryLocation(db, derived_callers), nullopt,
|
|
|
|
false /*force_display*/);
|
|
|
|
}
|
|
|
|
|
|
|
|
AddCodeLens("derived", "derived", &common,
|
|
|
|
ref.loc.OffsetStartColumn(offset++),
|
2017-10-17 05:44:58 +00:00
|
|
|
ToQueryLocation(db, func.derived), nullopt,
|
2017-09-22 01:14:57 +00:00
|
|
|
false /*force_display*/);
|
|
|
|
|
|
|
|
// "Base"
|
|
|
|
optional<QueryLocation> base_loc =
|
2017-10-17 05:44:58 +00:00
|
|
|
GetBaseDefinitionOrDeclarationSpelling(db, func);
|
2017-09-22 01:14:57 +00:00
|
|
|
if (base_loc) {
|
|
|
|
optional<lsLocation> ls_base =
|
|
|
|
GetLsLocation(db, working_files, *base_loc);
|
|
|
|
if (ls_base) {
|
|
|
|
optional<lsRange> range =
|
|
|
|
GetLsRange(common.working_file, ref.loc.range);
|
|
|
|
if (range) {
|
|
|
|
TCodeLens code_lens;
|
|
|
|
code_lens.range = *range;
|
|
|
|
code_lens.range.start.character += offset++;
|
|
|
|
code_lens.command = lsCommand<lsCodeLensCommandArguments>();
|
|
|
|
code_lens.command->title = "Base";
|
|
|
|
code_lens.command->command = "cquery.goto";
|
|
|
|
code_lens.command->arguments.uri = ls_base->uri;
|
|
|
|
code_lens.command->arguments.position =
|
|
|
|
ls_base->range.start;
|
|
|
|
response.result.push_back(code_lens);
|
|
|
|
}
|
2017-04-23 20:19:09 +00:00
|
|
|
}
|
2017-04-11 08:43:35 +00:00
|
|
|
}
|
2017-04-23 20:19:09 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case SymbolKind::Var: {
|
2017-10-17 05:44:58 +00:00
|
|
|
QueryVar& var = db->vars[symbol.idx];
|
|
|
|
if (!var.def)
|
2017-09-22 01:14:57 +00:00
|
|
|
continue;
|
2017-04-11 08:43:35 +00:00
|
|
|
|
2017-10-17 05:44:58 +00:00
|
|
|
if (var.def->is_local && !config->codeLensOnLocalVariables)
|
2017-09-22 01:14:57 +00:00
|
|
|
continue;
|
2017-05-21 01:26:50 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
bool force_display = true;
|
|
|
|
// Do not show a 0-refs lens on a macro with no uses, as it is most likely
|
|
|
|
// a header guard.
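// For example, an include-guard macro such as FOO_BAR_H_ typically has no
// uses, and a "0 refs" lens on it would just be noise.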
|
2017-10-17 05:44:58 +00:00
|
|
|
if (var.def->is_macro)
|
2017-09-22 01:14:57 +00:00
|
|
|
force_display = false;
|
2017-06-29 06:34:04 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
AddCodeLens("ref", "refs", &common, ref.loc.OffsetStartColumn(0),
|
2017-10-17 05:44:58 +00:00
|
|
|
var.uses, var.def->definition_spelling,
|
2017-09-22 01:14:57 +00:00
|
|
|
force_display);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case SymbolKind::File:
|
|
|
|
case SymbolKind::Invalid: {
|
|
|
|
assert(false && "unexpected");
|
|
|
|
break;
|
|
|
|
}
|
2017-04-23 20:19:09 +00:00
|
|
|
};
|
2017-03-29 06:33:38 +00:00
|
|
|
}
|
2017-04-22 07:32:29 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::TextDocumentCodeLens, response);
|
2017-10-28 22:09:14 +00:00
|
|
|
|
|
|
|
// TODO: Move this to a separate request (i.e. a custom DidView
|
|
|
|
// notification), since the user may have turned code lens off.
|
|
|
|
if (file && file->def) {
|
2017-11-09 07:06:32 +00:00
|
|
|
WorkingFile* working_file =
|
|
|
|
working_files->GetFileByFilename(file->def->path);
|
|
|
|
EmitInactiveLines(working_file, file->def->inactive_regions);
|
|
|
|
// Do not emit semantic highlighting information here, as it has not
|
|
|
|
// been updated.
|
2017-10-28 22:09:14 +00:00
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
break;
|
2017-03-17 07:58:41 +00:00
|
|
|
}
|
2017-03-06 08:48:51 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
case IpcId::WorkspaceSymbol: {
|
2017-09-22 01:14:57 +00:00
|
|
|
// TODO: implement fuzzy search, see
|
|
|
|
// https://github.com/junegunn/fzf/blob/master/src/matcher.go for
|
|
|
|
// inspiration
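// (For example, an fzf-style matcher would let the query "fbb" rank
// "FooBarBaz" highly even though it is not an exact substring of the
// detailed name.)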
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_WorkspaceSymbol>();
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
Out_WorkspaceSymbol response;
|
|
|
|
response.id = msg->id;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-07-28 02:14:33 +00:00
|
|
|
LOG_S(INFO) << "[querydb] Considering " << db->detailed_names.size()
|
2017-09-22 01:14:57 +00:00
|
|
|
<< " candidates for query " << msg->params.query;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
std::string query = msg->params.query;
|
2017-06-29 06:59:38 +00:00
|
|
|
|
|
|
|
std::unordered_set<std::string> inserted_results;
|
|
|
|
inserted_results.reserve(config->maxWorkspaceSearchResults);
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
for (int i = 0; i < db->detailed_names.size(); ++i) {
|
2017-06-17 02:42:14 +00:00
|
|
|
if (db->detailed_names[i].find(query) != std::string::npos) {
|
2017-06-29 06:59:38 +00:00
|
|
|
// Do not show the same entry twice.
|
|
|
|
if (!inserted_results.insert(db->detailed_names[i]).second)
|
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
InsertSymbolIntoResult(db, working_files, db->symbols[i],
|
|
|
|
&response.result);
|
2017-06-17 02:42:14 +00:00
|
|
|
if (response.result.size() >= config->maxWorkspaceSearchResults)
|
|
|
|
break;
|
2017-04-23 20:19:09 +00:00
|
|
|
}
|
2017-06-17 02:42:14 +00:00
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-06-17 02:42:14 +00:00
|
|
|
if (response.result.size() < config->maxWorkspaceSearchResults) {
|
|
|
|
for (int i = 0; i < db->detailed_names.size(); ++i) {
|
|
|
|
if (SubstringMatch(query, db->detailed_names[i])) {
|
2017-06-29 06:59:38 +00:00
|
|
|
// Do not show the same entry twice.
|
|
|
|
if (!inserted_results.insert(db->detailed_names[i]).second)
|
|
|
|
continue;
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
InsertSymbolIntoResult(db, working_files, db->symbols[i],
|
|
|
|
&response.result);
|
2017-06-17 02:42:14 +00:00
|
|
|
if (response.result.size() >= config->maxWorkspaceSearchResults)
|
|
|
|
break;
|
2017-04-23 20:19:09 +00:00
|
|
|
}
|
2017-04-12 07:19:49 +00:00
|
|
|
}
|
2017-04-09 22:16:06 +00:00
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
LOG_S(INFO) << "[querydb] Found " << response.result.size()
|
|
|
|
<< " results for query " << query;
|
2017-04-23 20:19:09 +00:00
|
|
|
ipc->SendOutMessageToClient(IpcId::WorkspaceSymbol, response);
|
|
|
|
break;
|
|
|
|
}
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-09-13 03:35:27 +00:00
|
|
|
case IpcId::CqueryIndexFile: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_CqueryIndexFile>();
|
2017-09-22 01:14:57 +00:00
|
|
|
queue->index_request.Enqueue(
|
|
|
|
Index_Request(NormalizePath(msg->params.path), msg->params.args,
|
|
|
|
msg->params.is_interactive, msg->params.contents));
|
2017-09-13 03:35:27 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryQueryDbWaitForIdleIndexer: {
|
|
|
|
LOG_S(INFO) << "Waiting for idle";
|
|
|
|
int idle_count = 0;
|
|
|
|
while (true) {
|
|
|
|
bool has_work = false;
|
2017-09-20 05:08:17 +00:00
|
|
|
has_work |= import_manager->HasActiveQuerydbImports();
|
2017-09-13 03:35:27 +00:00
|
|
|
has_work |= queue->HasWork();
|
2017-09-22 01:14:57 +00:00
|
|
|
has_work |= QueryDb_ImportMain(config, db, import_manager, queue,
|
|
|
|
working_files);
|
2017-09-13 03:35:27 +00:00
|
|
|
if (!has_work)
|
|
|
|
++idle_count;
|
|
|
|
else
|
|
|
|
idle_count = 0;
|
|
|
|
|
|
|
|
// There are race conditions between each of the three checks above,
|
|
|
|
// so we retry a bunch of times to try to avoid any.
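// In other words, we only report the indexer as idle after roughly ten
// consecutive iterations in which none of the checks found work.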
|
|
|
|
if (idle_count > 10)
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
LOG_S(INFO) << "Done waiting for idle";
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
case IpcId::CqueryExitWhenIdle: {
|
|
|
|
*exit_when_idle = true;
|
|
|
|
WorkThread::request_exit_on_idle = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2017-04-23 20:19:09 +00:00
|
|
|
default: {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(FATAL) << "Exiting; unhandled IPC message "
|
|
|
|
<< IpcIdToString(message->method_id);
|
2017-04-23 20:19:09 +00:00
|
|
|
exit(1);
|
|
|
|
}
|
2017-03-15 04:59:05 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// TODO: consider rate-limiting and checking for IPC messages so we don't
|
|
|
|
// block requests / we can serve partial requests.
|
2017-04-08 06:45:28 +00:00
|
|
|
|
2017-08-17 03:39:00 +00:00
|
|
|
if (QueryDb_ImportMain(config, db, import_manager, queue, working_files))
|
2017-04-23 22:45:40 +00:00
|
|
|
did_work = true;
|
2017-04-08 20:00:08 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
return did_work;
|
2017-03-05 02:16:23 +00:00
|
|
|
}
|
2017-03-15 04:59:05 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
void RunQueryDbThread(const std::string& bin_name,
|
|
|
|
Config* config,
|
|
|
|
MultiQueueWaiter* waiter,
|
|
|
|
QueueManager* queue) {
|
2017-09-13 03:35:27 +00:00
|
|
|
bool exit_when_idle = false;
|
2017-03-26 21:40:34 +00:00
|
|
|
Project project;
|
|
|
|
WorkingFiles working_files;
|
2017-09-27 06:03:43 +00:00
|
|
|
FileConsumer::SharedState file_consumer_shared;
|
|
|
|
|
2017-06-10 04:13:16 +00:00
|
|
|
ClangCompleteManager clang_complete(
|
2017-09-22 01:14:57 +00:00
|
|
|
config, &project, &working_files,
|
|
|
|
std::bind(&EmitDiagnostics, &working_files, std::placeholders::_1,
|
2017-09-27 06:03:43 +00:00
|
|
|
std::placeholders::_2),
|
|
|
|
std::bind(&IndexWithTuFromCodeCompletion, queue, &file_consumer_shared,
|
|
|
|
std::placeholders::_1, std::placeholders::_2,
|
|
|
|
std::placeholders::_3, std::placeholders::_4));
|
|
|
|
|
2017-05-27 04:21:00 +00:00
|
|
|
IncludeComplete include_complete(config, &project);
|
2017-06-30 06:51:22 +00:00
|
|
|
auto global_code_complete_cache = MakeUnique<CodeCompleteCache>();
|
|
|
|
auto non_global_code_complete_cache = MakeUnique<CodeCompleteCache>();
|
|
|
|
auto signature_cache = MakeUnique<CodeCompleteCache>();
|
2017-08-17 03:39:00 +00:00
|
|
|
ImportManager import_manager;
|
2017-08-16 03:54:59 +00:00
|
|
|
TimestampManager timestamp_manager;
|
2017-03-25 19:18:25 +00:00
|
|
|
|
|
|
|
// Run query db main loop.
|
2017-04-19 00:05:14 +00:00
|
|
|
SetCurrentThreadName("querydb");
|
2017-04-15 05:41:35 +00:00
|
|
|
QueryDatabase db;
|
2017-03-15 04:59:05 +00:00
|
|
|
while (true) {
|
2017-05-21 19:51:15 +00:00
|
|
|
bool did_work = QueryDbMainLoop(
|
2017-09-22 01:14:57 +00:00
|
|
|
config, &db, &exit_when_idle, waiter, queue, &project,
|
|
|
|
&file_consumer_shared, &import_manager, ×tamp_manager,
|
|
|
|
&working_files, &clang_complete, &include_complete,
|
|
|
|
global_code_complete_cache.get(), non_global_code_complete_cache.get(),
|
|
|
|
signature_cache.get());
|
2017-08-17 18:02:47 +00:00
|
|
|
|
2017-09-13 03:35:27 +00:00
|
|
|
// No more work left and an exit was requested, so exit.
|
2017-11-18 19:02:09 +00:00
|
|
|
if (!did_work && exit_when_idle && WorkThread::num_active_threads == 0) {
|
|
|
|
LOG_S(INFO) << "Exiting; exit_when_idle is set and there is no more work";
|
2017-09-13 03:35:27 +00:00
|
|
|
exit(0);
|
2017-11-18 19:02:09 +00:00
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
2017-08-17 18:02:47 +00:00
|
|
|
// Cleanup and free any unused memory.
|
|
|
|
FreeUnusedMemory();
|
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
if (!did_work) {
|
2017-09-22 01:14:57 +00:00
|
|
|
waiter->Wait({IpcManager::instance()->threaded_queue_for_server_.get(),
|
|
|
|
&queue->do_id_map, &queue->on_indexed});
|
2017-04-23 22:45:40 +00:00
|
|
|
}
|
2017-03-15 04:59:05 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-09-22 01:32:55 +00:00
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
// STDIN MAIN //////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
2017-03-05 19:48:05 +00:00
|
|
|
// Separate thread whose only job is to read from stdin and
|
|
|
|
// dispatch read commands to the actual indexer program. This
|
|
|
|
// cannot be done on the main thread because reading from std::cin
|
|
|
|
// blocks.
|
|
|
|
//
|
|
|
|
// |ipc| is connected to a server.
|
2017-09-22 01:14:57 +00:00
|
|
|
void LaunchStdinLoop(Config* config,
|
|
|
|
std::unordered_map<IpcId, Timer>* request_times) {
|
2017-11-17 09:30:58 +00:00
|
|
|
WorkThread::StartThread("stdin", [request_times]() {
|
2017-09-13 03:35:27 +00:00
|
|
|
IpcManager* ipc = IpcManager::instance();
|
2017-04-16 21:49:48 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
std::unique_ptr<BaseIpcMessage> message =
|
|
|
|
MessageRegistry::instance()->ReadMessageFromStdin();
|
2017-03-05 02:16:23 +00:00
|
|
|
|
|
|
|
// Message parsing can fail if we don't recognize the method.
|
|
|
|
if (!message)
|
2017-09-13 03:35:27 +00:00
|
|
|
return WorkThread::Result::MoreWork;
|
2017-03-05 02:16:23 +00:00
|
|
|
|
2017-04-21 07:46:51 +00:00
|
|
|
(*request_times)[message->method_id] = Timer();
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// std::cerr << "[stdin] Got message " << IpcIdToString(message->method_id)
|
|
|
|
// << std::endl;
|
2017-03-05 02:16:23 +00:00
|
|
|
switch (message->method_id) {
|
2017-09-22 01:14:57 +00:00
|
|
|
case IpcId::Initialized: {
|
|
|
|
// TODO: don't send output until we get this notification
|
|
|
|
break;
|
|
|
|
}
|
2017-03-26 06:47:59 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
case IpcId::CancelRequest: {
|
|
|
|
// TODO: support cancellation
|
|
|
|
break;
|
|
|
|
}
|
2017-03-26 06:47:59 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
case IpcId::Exit: {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(INFO) << "Exiting";
|
2017-09-22 01:14:57 +00:00
|
|
|
exit(0);
|
|
|
|
break;
|
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
case IpcId::CqueryExitWhenIdle: {
|
|
|
|
// querydb needs to know to exit when idle. We return out of the stdin
|
|
|
|
// loop to exit the thread; if we kept parsing input, stdin is likely
|
|
|
|
// closed anyway, so cquery would just exit.
|
|
|
|
LOG_S(INFO) << "cquery will exit when all threads are idle";
|
|
|
|
ipc->SendMessage(IpcManager::Destination::Server, std::move(message));
|
|
|
|
return WorkThread::Result::ExitThread;
|
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
case IpcId::Initialize:
|
|
|
|
case IpcId::TextDocumentDidOpen:
|
|
|
|
case IpcId::TextDocumentDidChange:
|
|
|
|
case IpcId::TextDocumentDidClose:
|
|
|
|
case IpcId::TextDocumentDidSave:
|
|
|
|
case IpcId::TextDocumentRename:
|
|
|
|
case IpcId::TextDocumentCompletion:
|
|
|
|
case IpcId::TextDocumentSignatureHelp:
|
|
|
|
case IpcId::TextDocumentDefinition:
|
|
|
|
case IpcId::TextDocumentDocumentHighlight:
|
|
|
|
case IpcId::TextDocumentHover:
|
|
|
|
case IpcId::TextDocumentReferences:
|
|
|
|
case IpcId::TextDocumentDocumentSymbol:
|
|
|
|
case IpcId::TextDocumentDocumentLink:
|
|
|
|
case IpcId::TextDocumentCodeAction:
|
|
|
|
case IpcId::TextDocumentCodeLens:
|
|
|
|
case IpcId::WorkspaceSymbol:
|
|
|
|
case IpcId::CqueryFreshenIndex:
|
|
|
|
case IpcId::CqueryTypeHierarchyTree:
|
|
|
|
case IpcId::CqueryCallTreeInitial:
|
|
|
|
case IpcId::CqueryCallTreeExpand:
|
|
|
|
case IpcId::CqueryVars:
|
|
|
|
case IpcId::CqueryCallers:
|
|
|
|
case IpcId::CqueryBase:
|
|
|
|
case IpcId::CqueryDerived:
|
|
|
|
case IpcId::CqueryIndexFile:
|
|
|
|
case IpcId::CqueryQueryDbWaitForIdleIndexer: {
|
|
|
|
ipc->SendMessage(IpcManager::Destination::Server, std::move(message));
|
|
|
|
break;
|
|
|
|
}
|
2017-03-26 06:47:59 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
default: {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(ERROR) << "Unhandled IPC message "
|
|
|
|
<< IpcIdToString(message->method_id);
|
2017-09-22 01:14:57 +00:00
|
|
|
exit(1);
|
|
|
|
}
|
2017-03-03 08:12:11 +00:00
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
|
|
|
return WorkThread::Result::MoreWork;
|
|
|
|
});
|
2017-03-03 08:12:11 +00:00
|
|
|
}
|
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
void LaunchStdoutThread(std::unordered_map<IpcId, Timer>* request_times,
|
|
|
|
MultiQueueWaiter* waiter,
|
|
|
|
QueueManager* queue) {
|
2017-09-13 03:35:27 +00:00
|
|
|
WorkThread::StartThread("stdout", [=]() {
|
|
|
|
IpcManager* ipc = IpcManager::instance();
|
2017-04-16 19:02:29 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
std::vector<std::unique_ptr<BaseIpcMessage>> messages =
|
|
|
|
ipc->GetMessages(IpcManager::Destination::Client);
|
2017-04-23 22:45:40 +00:00
|
|
|
if (messages.empty()) {
|
2017-09-22 01:14:57 +00:00
|
|
|
waiter->Wait({ipc->threaded_queue_for_client_.get()});
|
|
|
|
return queue->HasWork() ? WorkThread::Result::MoreWork
|
|
|
|
: WorkThread::Result::NoWork;
|
2017-04-23 22:45:40 +00:00
|
|
|
}
|
2017-04-16 19:02:29 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
for (auto& message : messages) {
|
2017-09-22 01:14:57 +00:00
|
|
|
// std::cerr << "[stdout] Processing message " <<
|
|
|
|
// IpcIdToString(message->method_id) << std::endl;
|
2017-04-22 06:15:46 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
switch (message->method_id) {
|
|
|
|
case IpcId::Cout: {
|
2017-09-22 01:32:55 +00:00
|
|
|
auto msg = message->As<Ipc_Cout>();
|
2017-04-21 07:46:51 +00:00
|
|
|
|
2017-06-14 06:59:40 +00:00
|
|
|
if (ShouldDisplayIpcTiming(msg->original_ipc_id)) {
|
2017-05-20 08:07:29 +00:00
|
|
|
Timer time = (*request_times)[msg->original_ipc_id];
|
2017-09-22 01:14:57 +00:00
|
|
|
time.ResetAndPrint("[e2e] Running " + std::string(IpcIdToString(
|
|
|
|
msg->original_ipc_id)));
|
2017-05-20 08:07:29 +00:00
|
|
|
}
|
2017-04-16 19:02:29 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
std::cout << msg->content;
|
|
|
|
std::cout.flush();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
default: {
|
2017-11-18 19:02:09 +00:00
|
|
|
LOG_S(FATAL) << "Exiting; unhandled IPC message "
|
|
|
|
<< IpcIdToString(message->method_id);
|
2017-04-23 22:45:40 +00:00
|
|
|
exit(1);
|
|
|
|
}
|
2017-04-16 23:57:31 +00:00
|
|
|
}
|
|
|
|
}
|
2017-09-13 03:35:27 +00:00
|
|
|
|
|
|
|
return WorkThread::Result::MoreWork;
|
|
|
|
});
|
2017-04-16 23:57:31 +00:00
|
|
|
}
|
2017-04-16 19:02:29 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
void LanguageServerMain(const std::string& bin_name,
|
|
|
|
Config* config,
|
|
|
|
MultiQueueWaiter* waiter) {
|
2017-09-13 03:35:27 +00:00
|
|
|
QueueManager queue(waiter);
|
2017-04-21 07:46:51 +00:00
|
|
|
std::unordered_map<IpcId, Timer> request_times;
|
|
|
|
|
Fix hang during reading from stdin
I experienced this hang when using cquery with LanguageClient-neovim.
Sometimes std::cout would hang because the pipe is full, which would
normally be fine, since the client would read from the pipe soon.
However, in this case the client is blocking on a write(). This
shouldn't happen, because cquery has a stdin thread which constantly
reads from stdin. But in C++, cin and cout are tied streams: reading
from cin causes cout to flush, which causes the read to block.
So cquery can't write because the client isn't reading, and the client
won't read until it finishes writing. It can't finish writing because
cquery can't read, and cquery can't read because cquery can't write,
which is a deadlock.
The solution is to simply untie cin and cout.
2017-11-28 18:17:50 +00:00
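// A minimal sketch of the tie described above (hypothetical request type):
// std::cin is tied to std::cout by default, so every read first flushes
// cout, and a full stdout pipe therefore blocks the stdin thread as well.
//
//   std::cin >> next_request;  // implicitly flushes std::cout -> may block
//
// Untying the streams below removes that implicit flush on input.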
|
|
|
std::cin.tie(NULL);
|
2017-09-13 03:35:27 +00:00
|
|
|
LaunchStdinLoop(config, &request_times);
|
2017-04-23 20:19:09 +00:00
|
|
|
|
|
|
|
// We run a dedicated thread for writing to stdout because there can be an
|
|
|
|
// unknown number of delays when outputting information.
|
2017-09-13 03:35:27 +00:00
|
|
|
LaunchStdoutThread(&request_times, waiter, &queue);
|
|
|
|
|
|
|
|
// Start querydb which takes over this thread. The querydb will launch
|
|
|
|
// indexer threads as needed.
|
|
|
|
RunQueryDbThread(bin_name, config, waiter, &queue);
|
2017-03-16 07:36:49 +00:00
|
|
|
}
|
2017-03-03 08:12:11 +00:00
|
|
|
|
2017-09-22 01:32:55 +00:00
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
|
|
|
// MAIN ////////////////////////////////////////////////////////////////////////
|
|
|
|
////////////////////////////////////////////////////////////////////////////////
|
2017-03-25 01:28:09 +00:00
|
|
|
int main(int argc, char** argv) {
|
2017-07-28 02:14:33 +00:00
|
|
|
loguru::init(argc, argv);
|
2017-10-28 22:09:14 +00:00
|
|
|
loguru::add_file("cquery_diagnostics.log", loguru::Truncate,
|
|
|
|
loguru::Verbosity_MAX);
|
2017-07-28 02:14:33 +00:00
|
|
|
loguru::g_flush_interval_ms = 0;
|
2017-10-28 21:31:54 +00:00
|
|
|
loguru::g_stderr_verbosity = 1;
|
2017-07-28 02:14:33 +00:00
|
|
|
|
2017-04-23 22:45:40 +00:00
|
|
|
MultiQueueWaiter waiter;
|
|
|
|
IpcManager::CreateInstance(&waiter);
|
2017-04-16 22:48:54 +00:00
|
|
|
|
2017-09-22 01:14:57 +00:00
|
|
|
// bool loop = true;
|
|
|
|
// while (loop)
|
2017-04-03 01:34:15 +00:00
|
|
|
// std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
2017-09-22 01:14:57 +00:00
|
|
|
// std::this_thread::sleep_for(std::chrono::seconds(10));
|
2017-03-03 08:12:11 +00:00
|
|
|
|
2017-03-25 20:27:28 +00:00
|
|
|
PlatformInit();
|
2017-04-16 21:51:47 +00:00
|
|
|
IndexInit();
|
|
|
|
|
2017-03-25 19:18:25 +00:00
|
|
|
RegisterMessageTypes();
|
2017-03-15 04:59:05 +00:00
|
|
|
|
2017-03-25 19:18:25 +00:00
|
|
|
std::unordered_map<std::string, std::string> options =
|
2017-09-22 01:14:57 +00:00
|
|
|
ParseOptions(argc, argv);
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-11-19 22:35:16 +00:00
|
|
|
bool print_help = true;
|
|
|
|
|
|
|
|
if (HasOption(options, "--test-unit")) {
|
|
|
|
print_help = false;
|
2017-03-17 23:45:10 +00:00
|
|
|
doctest::Context context;
|
|
|
|
context.applyCommandLine(argc, argv);
|
|
|
|
int res = context.run();
|
|
|
|
if (context.shouldExit())
|
2017-03-25 19:18:25 +00:00
|
|
|
return res;
|
2017-11-19 22:35:16 +00:00
|
|
|
}
|
2017-03-17 23:45:10 +00:00
|
|
|
|
2017-11-19 22:35:16 +00:00
|
|
|
if (HasOption(options, "--test-index")) {
|
|
|
|
print_help = false;
|
|
|
|
RunIndexTests();
|
2017-05-25 02:04:19 +00:00
|
|
|
std::cerr << std::endl << "[Enter] to exit" << std::endl;
|
|
|
|
std::cin.get();
|
2017-11-19 22:35:16 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if (HasOption(options, "--language-server")) {
|
|
|
|
print_help = false;
|
2017-09-22 01:14:57 +00:00
|
|
|
// std::cerr << "Running language server" << std::endl;
|
2017-09-24 00:36:28 +00:00
|
|
|
auto config = MakeUnique<Config>();
|
|
|
|
LanguageServerMain(argv[0], config.get(), &waiter);
|
2017-04-23 19:45:58 +00:00
|
|
|
return 0;
|
2017-11-19 22:35:16 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if (print_help) {
|
2017-04-23 19:45:58 +00:00
|
|
|
std::cout << R"help(cquery help:
|
2017-03-25 19:18:25 +00:00
|
|
|
|
2017-04-23 19:45:58 +00:00
|
|
|
cquery is a low-latency C++ language server.
|
2017-03-25 19:18:25 +00:00
|
|
|
|
|
|
|
General:
|
|
|
|
--help Print this help information.
|
|
|
|
--language-server
|
2017-04-23 19:45:58 +00:00
|
|
|
Run as a language server. This implements the language
|
|
|
|
server spec over STDIN and STDOUT.
|
2017-11-19 22:35:16 +00:00
|
|
|
--test-unit Run unit tests.
|
|
|
|
--test-index Run index tests.
|
2017-03-25 19:18:25 +00:00
|
|
|
|
|
|
|
Configuration:
|
2017-04-23 19:45:58 +00:00
|
|
|
When opening up a directory, cquery will look for a compile_commands.json
|
|
|
|
file emitted by your preferred build system. If not present, cquery will
|
|
|
|
use a recursive directory listing instead. Command line flags can be
|
2017-11-27 06:56:23 +00:00
|
|
|
provided by adding a file named `.cquery` in the top-level directory. Each
|
2017-04-23 19:45:58 +00:00
|
|
|
line in that file is a separate argument.
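
  For example, a `.cquery` file for a simple project might contain:

    -xc++
    -std=c++11
    -Iinclude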
|
|
|
|
|
|
|
|
There are also a number of configuration options available when
|
|
|
|
initializing the language server - your editor should have tooling to
|
2017-05-21 19:51:15 +00:00
|
|
|
describe those options. See |Config| in this source code for a detailed
|
|
|
|
list of all currently supported options.
|
2017-03-25 19:18:25 +00:00
|
|
|
)help";
|
2017-02-25 23:59:09 +00:00
|
|
|
}
|
2017-11-19 22:35:16 +00:00
|
|
|
|
|
|
|
return 0;
|
2017-02-25 23:59:09 +00:00
|
|
|
}
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_SUITE("LexFunctionDeclaration") {
|
|
|
|
TEST_CASE("simple") {
|
|
|
|
std::string buffer_content = " void Foo(); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, 'F');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, nullopt, &insert_text,
|
2017-11-19 22:11:54 +00:00
|
|
|
&newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "void Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Type"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "void Type::Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
|
|
|
}
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("ctor") {
|
|
|
|
std::string buffer_content = " Foo(); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, 'F');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Foo"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "Foo::Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
|
|
|
}
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("dtor") {
|
|
|
|
std::string buffer_content = " ~Foo(); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, '~');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Foo"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "Foo::~Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
|
|
|
}
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("complex return type") {
|
|
|
|
std::string buffer_content = " std::vector<int> Foo(); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, 'F');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 21:18:35 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, nullopt, &insert_text,
|
2017-11-19 22:11:54 +00:00
|
|
|
&newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "std::vector<int> Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Type"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "std::vector<int> Type::Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("extra complex return type") {
|
|
|
|
std::string buffer_content = " std::function < int() > \n Foo(); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, 'F');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, nullopt, &insert_text,
|
2017-11-19 22:11:54 +00:00
|
|
|
&newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "std::function < int() > \n Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Type"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "std::function < int() > \n Type::Foo() {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 0);
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST_CASE("parameters") {
|
|
|
|
std::string buffer_content = "void Foo(int a,\n\n int b); ";
|
|
|
|
lsPosition declaration = CharPos(buffer_content, 'F');
|
|
|
|
std::string insert_text;
|
|
|
|
int newlines_after_name = 0;
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
LexFunctionDeclaration(buffer_content, declaration, nullopt, &insert_text,
|
2017-11-19 22:11:54 +00:00
|
|
|
&newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "void Foo(int a,\n\n int b) {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 2);
|
|
|
|
|
|
|
|
LexFunctionDeclaration(buffer_content, declaration, std::string("Type"),
|
2017-11-19 22:11:54 +00:00
|
|
|
&insert_text, &newlines_after_name);
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(insert_text == "void Type::Foo(int a,\n\n int b) {\n}");
|
|
|
|
REQUIRE(newlines_after_name == 2);
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
}
|
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_SUITE("LexWordAroundPos") {
|
|
|
|
TEST_CASE("edges") {
|
|
|
|
std::string content = "Foobar";
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'F'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'o'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'b'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'a'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'r'), content) == "Foobar");
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("simple") {
|
|
|
|
std::string content = " Foobar ";
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'F'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'o'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'b'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'a'), content) == "Foobar");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'r'), content) == "Foobar");
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("underscores and numbers") {
|
|
|
|
std::string content = " _my_t5ype7 ";
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '_'), content) == "_my_t5ype7");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '5'), content) == "_my_t5ype7");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, 'e'), content) == "_my_t5ype7");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '7'), content) == "_my_t5ype7");
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("dot, dash, colon are skipped") {
|
|
|
|
std::string content = "1. 2- 3:";
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '1'), content) == "1");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '2'), content) == "2");
|
|
|
|
REQUIRE(LexWordAroundPos(CharPos(content, '3'), content) == "3");
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
}
|
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_SUITE("FindIncludeLine") {
|
|
|
|
TEST_CASE("in document") {
|
|
|
|
std::vector<std::string> lines = {
|
|
|
|
"#include <bbb>", // 0
|
|
|
|
"#include <ddd>" // 1
|
|
|
|
};
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <bbb>") == nullopt);
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("insert before") {
|
|
|
|
std::vector<std::string> lines = {
|
|
|
|
"#include <bbb>", // 0
|
|
|
|
"#include <ddd>" // 1
|
|
|
|
};
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <aaa>") == 0);
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("insert middle") {
|
|
|
|
std::vector<std::string> lines = {
|
|
|
|
"#include <bbb>", // 0
|
|
|
|
"#include <ddd>" // 1
|
|
|
|
};
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <ccc>") == 1);
|
|
|
|
}
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
TEST_CASE("insert after") {
|
|
|
|
std::vector<std::string> lines = {
|
|
|
|
"#include <bbb>", // 0
|
|
|
|
"#include <ddd>", // 1
|
|
|
|
"", // 2
|
|
|
|
};
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <eee>") == 2);
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST_CASE("ignore header") {
|
|
|
|
std::vector<std::string> lines = {
|
|
|
|
"// FOOBAR", // 0
|
|
|
|
"// FOOBAR", // 1
|
|
|
|
"// FOOBAR", // 2
|
|
|
|
"// FOOBAR", // 3
|
|
|
|
"", // 4
|
|
|
|
"#include <bbb>", // 5
|
|
|
|
"#include <ddd>", // 6
|
|
|
|
"", // 7
|
|
|
|
};
|
2017-05-29 23:57:19 +00:00
|
|
|
|
2017-11-19 18:05:06 +00:00
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <a>") == 5);
|
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <c>") == 6);
|
|
|
|
REQUIRE(FindIncludeLine(lines, "#include <e>") == 7);
|
|
|
|
}
|
|
|
|
}
|