Fix issue with fully qualified symbols, improve allocation code, improve partial symbol completion
parent a8e90377d5
commit e6610fd7bc
@@ -48,7 +48,7 @@ import stupidlog;
  * Returns:
  *     the autocompletion response
  */
-AutocompleteResponse getDoc(const AutocompleteRequest request)
+public AutocompleteResponse getDoc(const AutocompleteRequest request)
 {
     // Log.trace("Getting doc comments");
     AutocompleteResponse response;

@@ -58,16 +58,8 @@ AutocompleteResponse getDoc(const AutocompleteRequest request)
         allocator, &cache);
     if (symbols.length == 0)
         Log.error("Could not find symbol");
-    else foreach (symbol; symbols)
-    {
-        if (symbol.doc is null)
-        {
-            // Log.trace("Doc comment for ", symbol.name, " was null");
-            continue;
-        }
-        // Log.trace("Adding doc comment for ", symbol.name, ": ", symbol.doc);
+    else foreach (symbol; symbols.filter!(a => a.doc !is null))
         response.docComments ~= formatComment(symbol.doc);
-    }
     return response;
 }

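The getDoc change above replaces the explicit null check and continue with std.algorithm's filter. A minimal, self-contained sketch of the same pattern; the Symbol struct below is a stand-in for illustration, not DCD's ACSymbol:

import std.algorithm.iteration : filter;
import std.stdio : writeln;

struct Symbol
{
    string name;
    string doc; // null when the symbol has no doc comment
}

void main()
{
    Symbol[] symbols = [
        Symbol("foo", "Adds two numbers."),
        Symbol("bar", null),
        Symbol("baz", "Frees the buffer."),
    ];

    // Same shape as the new getDoc loop: visit only symbols that carry docs.
    foreach (symbol; symbols.filter!(a => a.doc !is null))
        writeln(symbol.name, ": ", symbol.doc);
}
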
@@ -78,9 +70,8 @@ AutocompleteResponse getDoc(const AutocompleteRequest request)
  * Returns:
  *     the autocompletion response
  */
-AutocompleteResponse findDeclaration(const AutocompleteRequest request)
+public AutocompleteResponse findDeclaration(const AutocompleteRequest request)
 {
-    // Log.trace("Finding declaration");
     AutocompleteResponse response;
     auto allocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024 * 16)))();
     auto cache = StringCache(StringCache.defaultBucketCount);

@@ -90,11 +81,9 @@ AutocompleteResponse findDeclaration(const AutocompleteRequest request)
     {
         response.symbolLocation = symbols[0].location;
         response.symbolFilePath = symbols[0].symbolFile.idup;
-        // Log.trace(symbols[0].name, " declared in ",
-        //     response.symbolFilePath, " at ", response.symbolLocation);
     }
     else
-        Log.error("Could not find symbol");
+        Log.error("Could not find symbol declaration");
     return response;
 }

@@ -105,41 +94,70 @@ AutocompleteResponse findDeclaration(const AutocompleteRequest request)
  * Returns:
  *     the autocompletion response
  */
-AutocompleteResponse complete(const AutocompleteRequest request)
+public AutocompleteResponse complete(const AutocompleteRequest request)
 {
-    // Log.info("Got a completion request");

     const(Token)[] tokenArray;
     auto cache = StringCache(StringCache.defaultBucketCount);
     auto beforeTokens = getTokensBeforeCursor(request.sourceCode,
         request.cursorPosition, &cache, tokenArray);
-    string partial;
-    IdType tokenType;

     if (beforeTokens.length >= 2 && (beforeTokens[$ - 1] == tok!"("
         || beforeTokens[$ - 1] == tok!"["))
     {
         return parenCompletion(beforeTokens, tokenArray, request.cursorPosition);
     }
-    if (beforeTokens.length >= 2 && isSelectiveImport(beforeTokens))
+    else if (beforeTokens.length >= 2 && isSelectiveImport(beforeTokens))
     {
         return selectiveImportCompletion(beforeTokens);
     }
+    else
+    {
+        return dotCompletion(beforeTokens, tokenArray, request.cursorPosition);
+    }
+}
+
+/******************************************************************************/
+private:
+
+/**
+ * Handles dot completion for identifiers and types.
+ * Params:
+ *     beforeTokens = the tokens before the cursor
+ *     tokenArray = all tokens in the file
+ *     cursorPosition = the cursor position in bytes
+ * Returns:
+ *     the autocompletion response
+ */
+AutocompleteResponse dotCompletion(T)(T beforeTokens,
+    const(Token)[] tokenArray, size_t cursorPosition)
+{
     AutocompleteResponse response;
+
+    // Partial symbol name appearing after the dot character and before the
+    // cursor.
+    string partial;
+
+    // Type of the token before the dot, or identifier if the cursor was at
+    // an identifier.
+    IdType significantTokenType;
+
     if (beforeTokens.length >= 1 && beforeTokens[$ - 1] == tok!"identifier")
     {
-        partial = beforeTokens[$ - 1].text;
-        tokenType = beforeTokens[$ - 1].type;
+        // Set partial to the slice of the identifier between the beginning
+        // of the identifier and the cursor. This improves the completion
+        // responses when the cursor is in the middle of an identifier instead
+        // of at the end
+        auto t = beforeTokens[$ - 1];
+        partial = t.text[0 .. cursorPosition - t.index];
+
+        significantTokenType = tok!"identifier";
         beforeTokens = beforeTokens[0 .. $ - 1];
     }
     else if (beforeTokens.length >= 2 && beforeTokens[$ - 1] == tok!".")
-        tokenType = beforeTokens[$ - 2].type;
+        significantTokenType = beforeTokens[$ - 2].type;
     else
         return response;
-    auto allocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024 * 16)))();
-    switch (tokenType)
+    switch (significantTokenType)
     {
     case tok!"stringLiteral":
     case tok!"wstringLiteral":

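The heart of the partial symbol completion improvement is the slice partial = t.text[0 .. cursorPosition - t.index]: completion now matches on the part of the identifier that sits before the cursor rather than on the whole identifier. A small sketch of the arithmetic using plain strings; the offsets are made up, and Token.text/Token.index are not reproduced here:

import std.stdio : writeln;

void main()
{
    // Pretend the buffer contains the identifier "writeln" starting at byte
    // offset 10 and the cursor sits right after the 't' (byte offset 14).
    string identifierText = "writeln";
    size_t identifierIndex = 10; // where the identifier starts (Token.index)
    size_t cursorPosition = 14;  // byte position of the cursor

    // Old behaviour: the whole identifier was used as the partial match.
    string oldPartial = identifierText;

    // New behaviour: only the text up to the cursor is used, so completing in
    // the middle of an identifier matches on "writ" instead of "writeln".
    string newPartial = identifierText[0 .. cursorPosition - identifierIndex];

    writeln("old: ", oldPartial); // old: writeln
    writeln("new: ", newPartial); // new: writ
}
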
@@ -178,12 +196,11 @@ AutocompleteResponse complete(const AutocompleteRequest request)
     case tok!")":
     case tok!"]":
     case tok!"this":
-        auto semanticAllocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024*16)));
-        Scope* completionScope = generateAutocompleteTrees(tokenArray,
-            "stdin", allocator, semanticAllocator);
+        auto allocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024*16)));
+        Scope* completionScope = generateAutocompleteTrees(tokenArray, allocator);
         scope(exit) typeid(Scope).destroy(completionScope);
         response.setCompletions(completionScope, getExpression(beforeTokens),
-            request.cursorPosition, CompletionType.identifiers, false, partial);
+            cursorPosition, CompletionType.identifiers, false, partial);
         break;
     case tok!"(":
     case tok!"{":

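The allocation change visible above: the block allocator is no longer created unconditionally near the top of the function, but only inside the case that actually builds completion trees, and generateAutocompleteTrees now takes a single allocator. The scoped! wrapper used throughout is std.typecons' stack-allocated class helper; a minimal sketch with a dummy class (CAllocatorImpl and BlockAllocator are allocator types used by DCD and are not reproduced here):

import std.stdio : writeln;
import std.typecons : scoped;

class Tracer
{
    this() { writeln("constructed"); }
    ~this() { writeln("destroyed"); }
}

void maybeUse(bool needed)
{
    if (!needed)
        return; // nothing is constructed for requests that never get here

    // scoped! builds the class instance in-place on the stack and runs its
    // destructor deterministically when the scope exits, which is how the
    // reworked completion code keeps each allocator local to the case that
    // needs it.
    auto t = scoped!Tracer();
    writeln("working");
}

void main()
{
    maybeUse(false); // prints nothing
    maybeUse(true);  // constructed, working, destroyed
}
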
@@ -208,7 +225,7 @@ auto getTokensBeforeCursor(const(ubyte[]) sourceCode, size_t cursorPosition,
     StringCache* cache, out const(Token)[] tokenArray)
 {
     LexerConfig config;
-    config.fileName = "stdin";
+    config.fileName = "";
     tokenArray = getTokensForParser(cast(ubyte[]) sourceCode, config, cache);
     auto sortedTokens = assumeSorted(tokenArray);
     return sortedTokens.lowerBound(cast(size_t) cursorPosition);

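Apart from the fileName change, getTokensBeforeCursor works by treating the token array as sorted by byte offset and binary-searching for everything strictly before the cursor. A hedged sketch of the same std.range calls on plain offsets (the values are illustrative):

import std.range : assumeSorted;
import std.stdio : writeln;

void main()
{
    // Byte offsets at which tokens start, already in ascending order.
    size_t[] tokenOffsets = [0, 4, 9, 15, 22, 30];
    size_t cursorPosition = 20;

    // assumeSorted wraps the array without re-checking it; lowerBound then
    // binary-searches and returns the elements strictly before the cursor.
    auto sorted = assumeSorted(tokenOffsets);
    auto beforeCursor = sorted.lowerBound(cursorPosition);

    writeln(beforeCursor); // [0, 4, 9, 15]
}
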
@@ -228,9 +245,7 @@ ACSymbol*[] getSymbolsForCompletion(const AutocompleteRequest request,
     const(Token)[] tokenArray;
     auto beforeTokens = getTokensBeforeCursor(request.sourceCode,
         request.cursorPosition, cache, tokenArray);
-    auto semanticAllocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024*16)));
-    Scope* completionScope = generateAutocompleteTrees(tokenArray,
-        "stdin", allocator, semanticAllocator);
+    Scope* completionScope = generateAutocompleteTrees(tokenArray, allocator);
     scope(exit) typeid(Scope).destroy(completionScope);
     auto expression = getExpression(beforeTokens);
     return getSymbolsByTokenChain(completionScope, expression,

@@ -251,7 +266,6 @@ AutocompleteResponse parenCompletion(T)(T beforeTokens,
 {
     AutocompleteResponse response;
     immutable(string)[] completions;
-    auto allocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024 * 16)))();
     switch (beforeTokens[$ - 2].type)
     {
     case tok!"__traits":

@@ -279,9 +293,8 @@ AutocompleteResponse parenCompletion(T)(T beforeTokens,
     case tok!"identifier":
     case tok!")":
     case tok!"]":
-        auto semanticAllocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024*16)));
-        Scope* completionScope = generateAutocompleteTrees(tokenArray,
-            "stdin", allocator, semanticAllocator);
+        auto allocator = scoped!(CAllocatorImpl!(BlockAllocator!(1024 * 16)))();
+        Scope* completionScope = generateAutocompleteTrees(tokenArray, allocator);
         scope(exit) typeid(Scope).destroy(completionScope);
         auto expression = getExpression(beforeTokens[0 .. $ - 1]);
         response.setCompletions(completionScope, expression,

@@ -299,11 +312,11 @@ bool isSelectiveImport(T)(T tokens)
     size_t i = tokens.length - 1;
     if (!(tokens[i] == tok!":" || tokens[i] == tok!","))
         return false;
-    bool r = false;
+    bool foundColon = false;
     loop: while (true) switch (tokens[i].type)
     {
     case tok!":":
-        r = true;
+        foundColon = true;
         goto case;
     case tok!"identifier":
     case tok!"=":

@@ -315,7 +328,7 @@ bool isSelectiveImport(T)(T tokens)
         i--;
         break;
     case tok!"import":
-        return r;
+        return foundColon;
     default:
         return false;
     }

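The rename from r to foundColon spells out what isSelectiveImport does: walk backwards from a trailing : or , and report whether an import keyword is reached after a colon was seen. A self-contained sketch of that backwards scan over a plain enum instead of DCD's lexer tokens (the token kinds and the bounds handling here are simplified so the sketch stands alone):

import std.stdio : writeln;

enum TokKind { identifier, dot, comma, colon, assign, importKeyword, other }

// Mirrors the shape of isSelectiveImport: starting from a trailing `:` or `,`,
// scan backwards over the kinds that may appear inside an import list; if an
// `import` keyword is reached and a `:` was seen on the way, the cursor is
// inside a selective import such as `import std.stdio : wr`.
bool looksLikeSelectiveImport(const TokKind[] tokens)
{
    if (tokens.length == 0
        || (tokens[$ - 1] != TokKind.colon && tokens[$ - 1] != TokKind.comma))
        return false;
    bool foundColon = false;
    foreach_reverse (kind; tokens)
    {
        final switch (kind)
        {
        case TokKind.colon:
            foundColon = true;
            break;
        case TokKind.identifier:
        case TokKind.assign:
        case TokKind.dot:
        case TokKind.comma:
            break; // keep scanning backwards
        case TokKind.importKeyword:
            return foundColon;
        case TokKind.other:
            return false;
        }
    }
    return false;
}

void main()
{
    with (TokKind)
    {
        // `import std.stdio :` -> true
        writeln(looksLikeSelectiveImport([importKeyword, identifier, dot, identifier, colon]));
        // `foo, bar,` -> false (no import keyword behind the comma chain)
        writeln(looksLikeSelectiveImport([identifier, comma, identifier, comma]));
    }
}
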
@@ -392,9 +405,8 @@ body
     auto symbols = ModuleCache.getSymbolsInModule(ModuleCache.resolveImportLoctation(path));
     import containers.hashset;
     HashSet!string h;
-    foreach (s; symbols.parts[])
-    {
-        if (s.kind == CompletionKind.importSymbol) foreach (sy; s.type.parts[])
+    void addSymbolToResponses(ACSymbol* sy)
     {
         auto a = ACSymbol(sy.name);
         if (!builtinSymbols.contains(&a) && sy.name !is null && !h.contains(sy.name))

@@ -404,16 +416,13 @@ body
             h.insert(sy.name);
         }
     }
+
+    foreach (s; symbols.parts[])
+    {
+        if (s.kind == CompletionKind.importSymbol) foreach (sy; s.type.parts[])
+            addSymbolToResponses(sy);
         else
-        {
-            auto a = ACSymbol(s.name);
-            if (!builtinSymbols.contains(&a) && s.name !is null && !h.contains(s.name))
-            {
-                response.completionKinds ~= s.kind;
-                response.completions ~= s.name;
-                h.insert(s.name);
-            }
-        }
+            addSymbolToResponses(s);
     }
     response.completionType = CompletionType.identifiers;
     return response;

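The selective-import completion refactor above pulls the duplicated "skip built-ins and duplicates, then append" logic into a nested addSymbolToResponses function that both the import-symbol branch and the plain-symbol branch call. A sketch of that dedup-via-nested-function shape, with an associative array standing in for the HashSet from the containers library:

import std.stdio : writeln;

struct Response
{
    string[] completions;
}

void main()
{
    string[] importedSymbols = ["writeln", "File", "writeln"];
    string[] localSymbols = ["File", "main"];

    Response response;
    bool[string] seen; // stands in for containers.hashset.HashSet!string

    // One nested function owns the "skip duplicates, then append" logic
    // instead of two copies of it in the two foreach branches.
    void addSymbolToResponses(string name)
    {
        if (name is null || name in seen)
            return;
        response.completions ~= name;
        seen[name] = true;
    }

    foreach (s; importedSymbols)
        addSymbolToResponses(s);
    foreach (s; localSymbols)
        addSymbolToResponses(s);

    writeln(response.completions); // ["writeln", "File", "main"]
}
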
@@ -426,8 +435,6 @@ ACSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
     T tokens, size_t cursorPosition, CompletionType completionType)
 {
     import std.d.lexer;
-    // Log.trace("Getting symbols from token chain",
-    //     tokens.map!stringToken);
     // Find the symbol corresponding to the beginning of the chain
     ACSymbol*[] symbols;
     if (tokens.length == 0)

@@ -435,7 +442,6 @@ ACSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
     if (tokens[0] == tok!"." && tokens.length > 1)
     {
         tokens = tokens[1 .. $];
-        // Log.trace("Looking for ", stringToken(tokens[0]), " at global scope");
         symbols = completionScope.getSymbolsAtGlobalScope(stringToken(tokens[0]));
     }
     else

@@ -447,11 +453,6 @@ ACSymbol*[] getSymbolsByTokenChain(T)(Scope* completionScope,
             " from position ", cursorPosition);
         return [];
     }
-    // else
-    // {
-    //     Log.trace("Found ", symbols[0].name, " at ", symbols[0].location,
-    //         " with type ", symbols[0].type is null ? "null" : symbols[0].type.name);
-    // }

     if (shouldSwapWithType(completionType, symbols[0].kind, 0, tokens.length - 1))
     {

@@ -29,12 +29,13 @@ import std.d.lexer;
 import std.d.parser;
 import std.typecons;

-Scope* generateAutocompleteTrees(const(Token)[] tokens, string symbolFile,
-    CAllocator symbolAllocator, CAllocator semanticAllocator)
+/**
+ * Used by autocompletion.
+ */
+Scope* generateAutocompleteTrees(const(Token)[] tokens, CAllocator symbolAllocator)
 {
-    Module m = parseModule(tokens, "editor buffer", semanticAllocator, &doesNothing);
-    auto first = scoped!FirstPass(m, symbolFile, symbolAllocator,
-        semanticAllocator);
+    Module m = parseModule(tokens, "stdin", symbolAllocator, &doesNothing);
+    auto first = scoped!FirstPass(m, "stdin", symbolAllocator, symbolAllocator);
     first.run();

     SecondPass second = SecondPass(first);

@@ -46,13 +47,22 @@ Scope* generateAutocompleteTrees(const(Token)[] tokens, string symbolFile,
     return third.moduleScope;
 }

-Module parseModuleSimple(const(Token)[] tokens, string fileName, CAllocator p)
+/**
+ * Used by import symbol caching.
+ *
+ * Params:
+ *     tokens = the tokens that compose the file
+ *     fileName = the name of the file being parsed
+ *     parseAllocator = the allocator to use for the AST
+ * Returns: the parsed module
+ */
+Module parseModuleSimple(const(Token)[] tokens, string fileName, CAllocator parseAllocator)
 {
     auto parser = scoped!SimpleParser();
     parser.fileName = fileName;
     parser.tokens = tokens;
     parser.messageFunction = &doesNothing;
-    parser.allocator = p;
+    parser.allocator = parseAllocator;
     return parser.parseModule();
 }

@@ -163,6 +163,7 @@ private:
             IMPORT_SYMBOL_NAME, CompletionKind.importSymbol, symbol));
     else
         currentScope.symbols.insert(symbol.parts[]);
+    currentScope.symbols.insert(moduleSymbol);
     continue;
 }