commit 2a509b158f
@@ -32,7 +32,31 @@ immutable string[] versions = ["AIX", "all", "Alpha", "ARM", "BigEndian", "BSD",
 */
 size_t findEndOfExpression(const Token[] tokens, size_t index)
 {
-    return index;
+    size_t i = index;
+    while (i < tokens.length)
+    {
+        switch (tokens[i].type)
+        {
+        case TokenType.RBrace:
+        case TokenType.RParen:
+        case TokenType.RBracket:
+        case TokenType.Semicolon:
+            break;
+        case TokenType.LParen:
+            skipParens(tokens, index);
+            break;
+        case TokenType.LBrace:
+            skipBraces(tokens, index);
+            break;
+        case TokenType.LBracket:
+            skipBrackets(tokens, index);
+            break;
+        default:
+            ++i;
+            break;
+        }
+    }
+    return i;
 }
 
 size_t findBeginningOfExpression(const Token[] tokens, size_t index)
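
A usage sketch for the reworked function (not part of the commit; it assumes this repository's Token and TokenType definitions and a token stream from its lexer):

    // Hypothetical caller: slice out the expression that starts at `start`.
    size_t start = 0;
    size_t end = findEndOfExpression(tokens, start);
    const(Token)[] expression = tokens[start .. end];
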
@@ -64,19 +88,19 @@ struct AutoComplete
 
         switch (symbol.type)
         {
-        case TokenType.floatLiteral:
+        case TokenType.FloatLiteral:
             return "float";
-        case TokenType.doubleLiteral:
+        case TokenType.DoubleLiteral:
             return "double";
-        case TokenType.realLiteral:
+        case TokenType.RealLiteral:
             return "real";
-        case TokenType.intLiteral:
+        case TokenType.IntLiteral:
             return "int";
-        case TokenType.unsignedIntLiteral:
+        case TokenType.UnsignedIntLiteral:
             return "uint";
-        case TokenType.longLiteral:
+        case TokenType.LongLiteral:
             return "long";
-        case TokenType.unsignedLongLiteral:
+        case TokenType.UnsignedLongLiteral:
             return "ulong";
         default:
             break;
@@ -92,21 +116,21 @@ struct AutoComplete
         auto index = preceedingTokens.length - 1;
         while (true)
         {
-            if (preceedingTokens[index] == TokenType.lBrace)
+            if (preceedingTokens[index] == TokenType.LBrace)
                 --depth;
-            else if (preceedingTokens[index] == TokenType.rBrace)
+            else if (preceedingTokens[index] == TokenType.RBrace)
                 ++depth;
             else if (depth <= 0 && preceedingTokens[index].value == symbol)
             {
                 // Found the symbol, now determine if it was declared here.
                 auto p = preceedingTokens[index - 1];
-                if ((p == TokenType.tAuto || p == TokenType.tImmutable
-                    || p == TokenType.tConst)
-                    && preceedingTokens[index + 1] == TokenType.assign)
+                if ((p == TokenType.Auto || p == TokenType.Immutable
+                    || p == TokenType.Const)
+                    && preceedingTokens[index + 1] == TokenType.Assign)
                 {
                     return null;
                 }
-                else if (p == TokenType.identifier
+                else if (p == TokenType.Identifier
                     || (p.type > TokenType.TYPES_BEGIN
                     && p.type < TokenType.TYPES_END))
                 {
@@ -153,14 +177,14 @@ struct AutoComplete
             return "";
         switch (tokens[index].type)
         {
-        case TokenType.tVersion:
+        case TokenType.Version:
             return to!string(join(map!`a ~ "?1"`(versions), " ").array());
-        case TokenType.tIf:
-        case TokenType.tCast:
-        case TokenType.tWhile:
-        case TokenType.tFor:
-        case TokenType.tForeach:
-        case TokenType.tSwitch:
+        case TokenType.If:
+        case TokenType.Cast:
+        case TokenType.While:
+        case TokenType.For:
+        case TokenType.Foreach:
+        case TokenType.Switch:
             return "";
         default:
             return "";
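
The Version branch builds its completion list by tagging every known version identifier with "?1" and joining with spaces. A standalone sketch of that expression (a small sample array stands in for the full versions list):

    import std.algorithm : map;
    import std.array : array, join;
    import std.conv : to;

    void main()
    {
        immutable string[] versions = ["AIX", "all", "Alpha"]; // sample subset
        auto completions = to!string(join(map!`a ~ "?1"`(versions), " ").array());
        assert(completions == "AIX?1 all?1 Alpha?1");
    }
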
||||||
4
build.sh
4
build.sh
|
|
@ -1,2 +1,2 @@
|
||||||
dmd *.d -release -noboundscheck -O -w -wi -m64 -property -ofdscanner
|
#dmd *.d -release -noboundscheck -O -w -wi -m64 -property -ofdscanner
|
||||||
#dmd *.d -g -unittest -m64 -w -wi -property -ofdscanner
|
dmd *.d -g -unittest -m64 -w -wi -property -ofdscanner
|
||||||
|
|
|
||||||
|
|
@@ -44,10 +44,10 @@ html { background-color: #111; color: #ccc; }
             case TokenType.TYPES_BEGIN: .. case TokenType.TYPES_END:
                 writeSpan("type", t.value);
                 break;
-            case TokenType.comment:
+            case TokenType.Comment:
                 writeSpan("comment", t.value);
                 break;
-            case TokenType.stringLiteral:
+            case TokenType.STRINGS_BEGIN: .. case TokenType.STRINGS_END:
                 writeSpan("string", t.value);
                 break;
             case TokenType.NUMBERS_BEGIN: .. case TokenType.NUMBERS_END:
619  langutils.d

@@ -1,4 +1,3 @@
-
 // Copyright Brian Schott (Sir Alaran) 2012.
 // Distributed under the Boost Software License, Version 1.0.
 // (See accompanying file LICENSE_1_0.txt or copy at
@@ -41,7 +40,7 @@ pure nothrow TokenType lookupTokenType(const string input)
     if (type !is null)
         return *type;
     else
-        return TokenType.identifier;
+        return TokenType.Identifier;
 }
 
 
@@ -52,222 +51,226 @@ enum TokenType: uint
 {
     // Operators
     OPERATORS_BEGIN,
-    div, /// /
-    divEquals, /// /=
-    dot, /// .
-    slice, // ..
-    vararg, /// ...
-    bitAnd, /// &
-    bitAndEquals, /// &=
-    lAnd, /// &&
-    bitOr, /// |
-    bitOrEquals, /// |=
-    lOr, /// ||
-    minus, /// -
-    minusEquals, /// -=
-    uMinus, /// --
-    plus, /// +
-    plusEquals, /// +=
-    uPlus, /// ++
-    less, /// <
-    lessEqual, /// <=
-    shiftLeft, /// <<
-    shiftLeftEqual, /// <<=
-    lessOrGreater, /// <>
-    lessEqualGreater, // <>=
-    greater, /// >
-    greaterEqual, /// >=
-    shiftRightEqual, /// >>=
-    unsignedShiftRightEqual, /// >>>=
-    shiftRight, /// >>
-    unsignedShiftRight, /// >>>
-    not, /// !
-    notEquals, /// !=
-    notLessEqualGreater, /// !<>
-    unordered, /// !<>=
-    notLess, /// !<
-    notLessEqual, /// !<=
-    notGreater, /// !>
-    notGreaterEqual, /// !>=
-    lParen, /// $(LPAREN)
-    rParen, /// $(RPAREN)
-    lBracket, /// [
-    rBracket, /// ]
-    lBrace, /// {
-    rBrace, /// }
-    ternary, /// ?
-    comma, /// ,
-    semicolon, /// ;
-    colon, /// :
-    dollar, /// $
-    assign, /// =
-    equals, /// ==
-    star, /// *
-    mulEquals, /// *=
-    mod, /// %
-    modEquals, /// %=
-    xor, /// ^
-    xorEquals, /// ^=
-    pow, /// ^^
-    powEquals, /// ^^=
-    tilde, /// ~
-    catEquals, /// ~=
-    hash, // #
-    goesTo, // =>
+    Assign, /// =
+    BitAnd, /// &
+    BitAndEquals, /// &=
+    BitOr, /// |
+    BitOrEquals, /// |=
+    CatEquals, /// ~=
+    Colon, /// :
+    Comma, /// ,
+    Decrement, /// --
+    Div, /// /
+    DivEquals, /// /=
+    Dollar, /// $
+    Dot, /// .
+    Equals, /// ==
+    GoesTo, // =>
+    Greater, /// >
+    GreaterEqual, /// >=
+    Hash, // #
+    Increment, /// ++
+    LBrace, /// {
+    LBracket, /// [
+    Less, /// <
+    LessEqual, /// <=
+    LessEqualGreater, // <>=
+    LessOrGreater, /// <>
+    LogicAnd, /// &&
+    LogicOr, /// ||
+    LParen, /// $(LPAREN)
+    Minus, /// -
+    MinusEquals, /// -=
+    Mod, /// %
+    ModEquals, /// %=
+    MulEquals, /// *=
+    Not, /// !
+    NotEquals, /// !=
+    NotGreater, /// !>
+    NotGreaterEqual, /// !>=
+    NotLess, /// !<
+    NotLessEqual, /// !<=
+    NotLessEqualGreater, /// !<>
+    Plus, /// +
+    PlusEquals, /// +=
+    Pow, /// ^^
+    PowEquals, /// ^^=
+    RBrace, /// }
+    RBracket, /// ]
+    RParen, /// $(RPAREN)
+    Semicolon, /// ;
+    ShiftLeft, /// <<
+    ShiftLeftEqual, /// <<=
+    ShiftRight, /// >>
+    ShiftRightEqual, /// >>=
+    Slice, // ..
+    Star, /// *
+    Ternary, /// ?
+    Tilde, /// ~
+    Unordered, /// !<>=
+    UnsignedShiftRight, /// >>>
+    UnsignedShiftRightEqual, /// >>>=
+    Vararg, /// ...
+    Xor, /// ^
+    XorEquals, /// ^=
     OPERATORS_END,
 
     // Types
     TYPES_BEGIN,
-    tString, /// string
-    tBool, /// bool,
-    tByte, /// byte,
-    tCdouble, /// cdouble,
-    tCent, /// cent,
-    tCfloat, /// cfloat,
-    tChar, /// char,
-    tCreal, /// creal,
-    tDchar, /// dchar,
-    tDouble, /// double,
-    tFloat, /// float,
-    tUbyte, /// ubyte,
-    tUcent, /// ucent,
-    tUint, /// uint,
-    tUlong, /// ulong,
-    tShort, /// short,
-    tReal, /// real,
-    tLong, /// long,
-    tInt, /// int,
-    tFunction, /// function,
-    tIdouble, /// idouble,
-    tIreal, /// ireal,
-    tWchar, /// wchar,
-    tVoid, /// void,
-    tUshort, /// ushort,
-    tIfloat, /// ifloat,
+    Bool, /// bool,
+    Byte, /// byte,
+    Cdouble, /// cdouble,
+    Cent, /// cent,
+    Cfloat, /// cfloat,
+    Char, /// char,
+    Creal, /// creal,
+    Dchar, /// dchar,
+    Double, /// double,
+    DString, /// dstring
+    Float, /// float,
+    Function, /// function,
+    Idouble, /// idouble,
+    Ifloat, /// ifloat,
+    Int, /// int,
+    Ireal, /// ireal,
+    Long, /// long,
+    Real, /// real,
+    Short, /// short,
+    String, /// string
+    Ubyte, /// ubyte,
+    Ucent, /// ucent,
+    Uint, /// uint,
+    Ulong, /// ulong,
+    Ushort, /// ushort,
+    Void, /// void,
+    Wchar, /// wchar,
+    WString, /// wstring
     TYPES_END,
-    tTemplate, /// template,
+    Template, /// template,
 
     // Keywords
     KEYWORDS_BEGIN,
     ATTRIBUTES_BEGIN,
-    tExtern, /// extern,
-    tAlign, /// align,
-    tPragma, /// pragma,
-    tDeprecated, /// deprecated,
+    Align, /// align,
+    Deprecated, /// deprecated,
+    Extern, /// extern,
+    Pragma, /// pragma,
     PROTECTION_BEGIN,
-    tPackage, /// package,
-    tPrivate, /// private,
-    tProtected, /// protected,
-    tPublic, /// public,
-    tExport, /// export,
+    Export, /// export,
+    Package, /// package,
+    Private, /// private,
+    Protected, /// protected,
+    Public, /// public,
     PROTECTION_END,
-    tStatic, /// static,
-    tSynchronized, /// synchronized,
-    tFinal, /// final
-    tAbstract, /// abstract,
-    tConst, /// const,
-    tAuto, /// auto,
-    tScope, /// scope,
-    t__gshared, /// __gshared,
-    tShared, // shared,
-    tImmutable, // immutable,
-    tInout, // inout,
-    atDisable, /// @disable
+    Abstract, /// abstract,
+    AtDisable, /// @disable
+    Auto, /// auto,
+    Const, /// const,
+    Final, /// final
+    Gshared, /// __gshared,
+    Immutable, // immutable,
+    Inout, // inout,
+    Scope, /// scope,
+    Shared, // shared,
+    Static, /// static,
+    Synchronized, /// synchronized,
     ATTRIBUTES_END,
-    tAlias, /// alias,
-    tAsm, /// asm,
-    tAssert, /// assert,
-    tBody, /// body,
-    tBreak, /// break,
-    tCase, /// case,
-    tCast, /// cast,
-    tCatch, /// catch,
-    tClass, /// class,
-    tContinue, /// continue,
-    tDebug, /// debug,
-    tDefault, /// default,
-    tDelegate, /// delegate,
-    tDelete, /// delete,
-    tDo, /// do,
-    tElse, /// else,
-    tEnum, /// enum,
-    tFalse, /// false,
-    tFinally, /// finally,
-    tFor, /// for,
-    tForeach, /// foreach,
-    tForeach_reverse, /// foreach_reverse,
-    tGoto, /// goto,
-    tIf, /// if ,
-    tImport, /// import,
-    tIn, /// in,
-    tInterface, /// interface,
-    tInvariant, /// invariant,
-    tIs, /// is,
-    tLazy, /// lazy,
-    tMacro, /// macro,
-    tMixin, /// mixin,
-    tModule, /// module,
-    tNew, /// new,
-    tNothrow, /// nothrow,
-    tNull, /// null,
-    tOut, /// out,
-    tOverride, /// override,
-    tPure, /// pure,
-    tRef, /// ref,
-    tReturn, /// return,
-    tStruct, /// struct,
-    tSuper, /// super,
-    tSwitch, /// switch ,
-    tThis, /// this,
-    tThrow, /// throw,
-    tTrue, /// true,
-    tTry, /// try,
-    tTypedef, /// typedef,
-    tTypeid, /// typeid,
-    tTypeof, /// typeof,
-    tUnion, /// union,
-    tUnittest, /// unittest,
-    tVersion, /// version,
-    tVolatile, /// volatile,
-    tWhile, /// while ,
-    tWith, /// with,
+    Alias, /// alias,
+    Asm, /// asm,
+    Assert, /// assert,
+    Body, /// body,
+    Break, /// break,
+    Case, /// case,
+    Cast, /// cast,
+    Catch, /// catch,
+    Class, /// class,
+    Continue, /// continue,
+    Debug, /// debug,
+    Default, /// default,
+    Delegate, /// delegate,
+    Delete, /// delete,
+    Do, /// do,
+    Else, /// else,
+    Enum, /// enum,
+    False, /// false,
+    Finally, /// finally,
+    Foreach, /// foreach,
+    Foreach_reverse, /// foreach_reverse,
+    For, /// for,
+    Goto, /// goto,
+    If, /// if ,
+    Import, /// import,
+    In, /// in,
+    Interface, /// interface,
+    Invariant, /// invariant,
+    Is, /// is,
+    Lazy, /// lazy,
+    Macro, /// macro,
+    Mixin, /// mixin,
+    Module, /// module,
+    New, /// new,
+    Nothrow, /// nothrow,
+    Null, /// null,
+    Out, /// out,
+    Override, /// override,
+    Pure, /// pure,
+    Ref, /// ref,
+    Return, /// return,
+    Struct, /// struct,
+    Super, /// super,
+    Switch, /// switch ,
+    This, /// this,
+    Throw, /// throw,
+    True, /// true,
+    Try, /// try,
+    Typedef, /// typedef,
+    Typeid, /// typeid,
+    Typeof, /// typeof,
+    Union, /// union,
+    Unittest, /// unittest,
+    Version, /// version,
+    Volatile, /// volatile,
+    While, /// while ,
+    With, /// with,
     KEYWORDS_END,
 
     // Constants
     CONSTANTS_BEGIN,
-    t__FILE__, /// __FILE__,
-    t__LINE__, /// __LINE__,
-    t__thread, /// __thread,
-    t__traits, /// __traits,
+    File, /// __FILE__,
+    Line, /// __LINE__,
+    Thread, /// __thread,
+    Traits, /// __traits,
     CONSTANTS_END,
 
     // Properties
     PROPERTIES_BEGIN,
-    atProperty, /// @property
-    atSafe, /// @safe
-    atSystem, /// @system
-    atTrusted, /// @trusted
+    AtProperty, /// @property
+    AtSafe, /// @safe
+    AtSystem, /// @system
+    AtTrusted, /// @trusted
     PROPERTIES_END,
 
     // Misc
     MISC_BEGIN,
-    scriptLine, // Line at the beginning of source file that starts from #!
-    comment, /// /** comment */ or // comment or ///comment
+    Blank, /// unknown token type
+    Comment, /// /** comment */ or // comment or ///comment
+    Identifier, /// anything else
+    ScriptLine, // Line at the beginning of source file that starts from #!
+    Whitespace, /// whitespace
     NUMBERS_BEGIN,
-    floatLiteral, /// 123.456f or 0x123_45p-af
-    doubleLiteral, /// 123.456
-    realLiteral, /// 123.456L
-    intLiteral, /// 123 or 0b1101010101
-    unsignedIntLiteral, /// 123u
-    longLiteral, /// 123L
-    unsignedLongLiteral, /// 123uL
+    DoubleLiteral, /// 123.456
+    FloatLiteral, /// 123.456f or 0x123_45p-af
+    IntLiteral, /// 123 or 0b1101010101
+    LongLiteral, /// 123L
+    RealLiteral, /// 123.456L
+    UnsignedIntLiteral, /// 123u
+    UnsignedLongLiteral, /// 123uL
     NUMBERS_END,
-    stringLiteral, /// "a string"
-    identifier, /// anything else
-    whitespace, /// whitespace
-    blank, /// unknown token type
+    STRINGS_BEGIN,
+    DStringLiteral, /// "32-bit character string"d
+    StringLiteral, /// "a string"
+    WStringLiteral, /// "16-bit character string"w
+    STRINGS_END,
     MISC_END,
 }
 
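
Because each category now sits between a `*_BEGIN` and `*_END` marker, code elsewhere in this commit can match whole categories with D's case-range syntax. A small sketch (not from the patch; it assumes the enum above):

    string category(TokenType t)
    {
        switch (t)
        {
        case TokenType.OPERATORS_BEGIN: .. case TokenType.OPERATORS_END:
            return "operator";
        case TokenType.TYPES_BEGIN: .. case TokenType.TYPES_END:
            return "type";
        case TokenType.KEYWORDS_BEGIN: .. case TokenType.KEYWORDS_END:
            return "keyword";
        case TokenType.NUMBERS_BEGIN: .. case TokenType.NUMBERS_END:
            return "number literal";
        case TokenType.STRINGS_BEGIN: .. case TokenType.STRINGS_END:
            return "string literal";
        default:
            return "other";
        }
    }
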
@@ -281,119 +284,121 @@ immutable TokenType[string] tokenLookup;
 static this()
 {
     tokenLookup = [
-        "abstract" : TokenType.tAbstract,
-        "alias" : TokenType.tAlias,
-        "align" : TokenType.tAlign,
-        "asm" : TokenType.tAsm,
-        "assert" : TokenType.tAssert,
-        "auto" : TokenType.tAuto,
-        "body" : TokenType.tBody,
-        "bool" : TokenType.tBool,
-        "break" : TokenType.tBreak,
-        "byte" : TokenType.tByte,
-        "case" : TokenType.tCase,
-        "cast" : TokenType.tCast,
-        "catch" : TokenType.tCatch,
-        "cdouble" : TokenType.tCdouble,
-        "cent" : TokenType.tCent,
-        "cfloat" : TokenType.tCfloat,
-        "char" : TokenType.tChar,
-        "class" : TokenType.tClass,
-        "const" : TokenType.tConst,
-        "continue" : TokenType.tContinue,
-        "creal" : TokenType.tCreal,
-        "dchar" : TokenType.tDchar,
-        "debug" : TokenType.tDebug,
-        "default" : TokenType.tDefault,
-        "delegate" : TokenType.tDelegate,
-        "delete" : TokenType.tDelete,
-        "deprecated" : TokenType.tDeprecated,
-        "do" : TokenType.tDo,
-        "double" : TokenType.tDouble,
-        "else" : TokenType.tElse,
-        "enum" : TokenType.tEnum,
-        "export" : TokenType.tExport,
-        "extern" : TokenType.tExtern,
-        "false" : TokenType.tFalse,
-        "final" : TokenType.tFinal,
-        "finally" : TokenType.tFinally,
-        "float" : TokenType.tFloat,
-        "for" : TokenType.tFor,
-        "foreach" : TokenType.tForeach,
-        "foreach_reverse" : TokenType.tForeach_reverse,
-        "function" : TokenType.tFunction,
-        "goto" : TokenType.tGoto,
-        "idouble" : TokenType.tIdouble,
-        "if" : TokenType.tIf,
-        "ifloat" : TokenType.tIfloat,
-        "immutable" : TokenType.tImmutable,
-        "import" : TokenType.tImport,
-        "in" : TokenType.tIn,
-        "inout" : TokenType.tInout,
-        "int" : TokenType.tInt,
-        "interface" : TokenType.tInterface,
-        "invariant" : TokenType.tInvariant,
-        "ireal" : TokenType.tIreal,
-        "is" : TokenType.tIs,
-        "lazy" : TokenType.tLazy,
-        "long" : TokenType.tLong,
-        "macro" : TokenType.tMacro,
-        "mixin" : TokenType.tMixin,
-        "module" : TokenType.tModule,
-        "new" : TokenType.tNew,
-        "nothrow" : TokenType.tNothrow,
-        "null" : TokenType.tNull,
-        "out" : TokenType.tOut,
-        "override" : TokenType.tOverride,
-        "package" : TokenType.tPackage,
-        "pragma" : TokenType.tPragma,
-        "private" : TokenType.tPrivate,
-        "protected" : TokenType.tProtected,
-        "public" : TokenType.tPublic,
-        "pure" : TokenType.tPure,
-        "real" : TokenType.tReal,
-        "ref" : TokenType.tRef,
-        "return" : TokenType.tReturn,
-        "scope" : TokenType.tScope,
-        "shared" : TokenType.tShared,
-        "short" : TokenType.tShort,
-        "static" : TokenType.tStatic,
-        "struct" : TokenType.tStruct,
-        "string" : TokenType.tString,
-        "super" : TokenType.tSuper,
-        "switch" : TokenType.tSwitch,
-        "synchronized" : TokenType.tSynchronized,
-        "template" : TokenType.tTemplate,
-        "this" : TokenType.tThis,
-        "throw" : TokenType.tThrow,
-        "true" : TokenType.tTrue,
-        "try" : TokenType.tTry,
-        "typedef" : TokenType.tTypedef,
-        "typeid" : TokenType.tTypeid,
-        "typeof" : TokenType.tTypeof,
-        "ubyte" : TokenType.tUbyte,
-        "ucent" : TokenType.tUcent,
-        "uint" : TokenType.tUint,
-        "ulong" : TokenType.tUlong,
-        "union" : TokenType.tUnion,
-        "unittest" : TokenType.tUnittest,
-        "ushort" : TokenType.tUshort,
-        "version" : TokenType.tVersion,
-        "void" : TokenType.tVoid,
-        "volatile" : TokenType.tVolatile,
-        "wchar" : TokenType.tWchar,
-        "while" : TokenType.tWhile,
-        "with" : TokenType.tWith,
-        "__FILE__" : TokenType.t__FILE__,
-        "__LINE__" : TokenType.t__LINE__,
-        "__gshared" : TokenType.t__gshared,
-        "__thread" : TokenType.t__thread,
-        "__traits" : TokenType.t__traits,
-        "@disable" : TokenType.atDisable,
-        "@property" : TokenType.atProperty,
-        "@safe" : TokenType.atSafe,
-        "@system" : TokenType.atSystem,
-        "@trusted" : TokenType.atTrusted,
+        "abstract" : TokenType.Abstract,
+        "alias" : TokenType.Alias,
+        "align" : TokenType.Align,
+        "asm" : TokenType.Asm,
+        "assert" : TokenType.Assert,
+        "auto" : TokenType.Auto,
+        "body" : TokenType.Body,
+        "bool" : TokenType.Bool,
+        "break" : TokenType.Break,
+        "byte" : TokenType.Byte,
+        "case" : TokenType.Case,
+        "cast" : TokenType.Cast,
+        "catch" : TokenType.Catch,
+        "cdouble" : TokenType.Cdouble,
+        "cent" : TokenType.Cent,
+        "cfloat" : TokenType.Cfloat,
+        "char" : TokenType.Char,
+        "class" : TokenType.Class,
+        "const" : TokenType.Const,
+        "continue" : TokenType.Continue,
+        "creal" : TokenType.Creal,
+        "dchar" : TokenType.Dchar,
+        "debug" : TokenType.Debug,
+        "default" : TokenType.Default,
+        "delegate" : TokenType.Delegate,
+        "delete" : TokenType.Delete,
+        "deprecated" : TokenType.Deprecated,
+        "@disable" : TokenType.AtDisable,
+        "do" : TokenType.Do,
+        "double" : TokenType.Double,
+        "dstring" : TokenType.DString,
+        "else" : TokenType.Else,
+        "enum" : TokenType.Enum,
+        "export" : TokenType.Export,
+        "extern" : TokenType.Extern,
+        "false" : TokenType.False,
+        "__FILE__" : TokenType.File,
+        "finally" : TokenType.Finally,
+        "final" : TokenType.Final,
+        "float" : TokenType.Float,
+        "foreach_reverse" : TokenType.Foreach_reverse,
+        "foreach" : TokenType.Foreach,
+        "for" : TokenType.For,
+        "function" : TokenType.Function,
+        "goto" : TokenType.Goto,
+        "__gshared" : TokenType.Gshared,
+        "idouble" : TokenType.Idouble,
+        "ifloat" : TokenType.Ifloat,
+        "if" : TokenType.If,
+        "immutable" : TokenType.Immutable,
+        "import" : TokenType.Import,
+        "inout" : TokenType.Inout,
+        "interface" : TokenType.Interface,
+        "in" : TokenType.In,
+        "int" : TokenType.Int,
+        "invariant" : TokenType.Invariant,
+        "ireal" : TokenType.Ireal,
+        "is" : TokenType.Is,
+        "lazy" : TokenType.Lazy,
+        "__LINE__" : TokenType.Line,
+        "long" : TokenType.Long,
+        "macro" : TokenType.Macro,
+        "mixin" : TokenType.Mixin,
+        "module" : TokenType.Module,
+        "new" : TokenType.New,
+        "nothrow" : TokenType.Nothrow,
+        "null" : TokenType.Null,
+        "out" : TokenType.Out,
+        "override" : TokenType.Override,
+        "package" : TokenType.Package,
+        "pragma" : TokenType.Pragma,
+        "private" : TokenType.Private,
+        "@property" : TokenType.AtProperty,
+        "protected" : TokenType.Protected,
+        "public" : TokenType.Public,
+        "pure" : TokenType.Pure,
+        "real" : TokenType.Real,
+        "ref" : TokenType.Ref,
+        "return" : TokenType.Return,
+        "@safe" : TokenType.AtSafe,
+        "scope" : TokenType.Scope,
+        "shared" : TokenType.Shared,
+        "short" : TokenType.Short,
+        "static" : TokenType.Static,
+        "string" : TokenType.String,
+        "struct" : TokenType.Struct,
+        "super" : TokenType.Super,
+        "switch" : TokenType.Switch,
+        "synchronized" : TokenType.Synchronized,
+        "@system" : TokenType.AtSystem,
+        "template" : TokenType.Template,
+        "this" : TokenType.This,
+        "__thread" : TokenType.Thread,
+        "throw" : TokenType.Throw,
+        "__traits" : TokenType.Traits,
+        "true" : TokenType.True,
+        "@trusted" : TokenType.AtTrusted,
+        "try" : TokenType.Try,
+        "typedef" : TokenType.Typedef,
+        "typeid" : TokenType.Typeid,
+        "typeof" : TokenType.Typeof,
+        "ubyte" : TokenType.Ubyte,
+        "ucent" : TokenType.Ucent,
+        "uint" : TokenType.Uint,
+        "ulong" : TokenType.Ulong,
+        "union" : TokenType.Union,
+        "unittest" : TokenType.Unittest,
+        "ushort" : TokenType.Ushort,
+        "version" : TokenType.Version,
+        "void" : TokenType.Void,
+        "volatile" : TokenType.Volatile,
+        "wchar" : TokenType.Wchar,
+        "while" : TokenType.While,
+        "with" : TokenType.With,
+        "wstring" : TokenType.WString,
     ];
 }
 
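
A sketch of how the lookup behaves after the rename (hypothetical unittest, assuming the langutils.d definitions above):

    unittest
    {
        assert(lookupTokenType("while") == TokenType.While);
        assert(lookupTokenType("@safe") == TokenType.AtSafe);
        assert(lookupTokenType("__FILE__") == TokenType.File);
        // Anything not in the table falls back to Identifier.
        assert(lookupTokenType("myVariable") == TokenType.Identifier);
    }
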
13  main.d

@@ -27,12 +27,13 @@ pure bool isLineOfCode(TokenType t)
 {
     switch(t)
     {
-    case TokenType.semicolon:
-    case TokenType.tWhile:
-    case TokenType.tIf:
-    case TokenType.tFor:
-    case TokenType.tForeach:
-    case TokenType.tCase:
+    case TokenType.Semicolon:
+    case TokenType.While:
+    case TokenType.If:
+    case TokenType.For:
+    case TokenType.Foreach:
+    case TokenType.Foreach_reverse:
+    case TokenType.Case:
         return true;
     default:
         return false;
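
A sketch of the counting rule after this change (hypothetical unittest, not part of the commit):

    unittest
    {
        assert(isLineOfCode(TokenType.Semicolon));
        assert(isLineOfCode(TokenType.Foreach_reverse)); // newly counted
        assert(!isLineOfCode(TokenType.LBrace));
    }
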
282  parser.d

@@ -51,7 +51,7 @@ body
 */
 const(Token)[] betweenBalancedBraces(const Token[] tokens, ref size_t index)
 {
-    return betweenBalanced(tokens, index, TokenType.lBrace, TokenType.rBrace);
+    return betweenBalanced(tokens, index, TokenType.LBrace, TokenType.RBrace);
 }
 
 
@@ -60,7 +60,7 @@ const(Token)[] betweenBalancedBraces(const Token[] tokens, ref size_t index)
 */
 const(Token)[] betweenBalancedParens(const Token[] tokens, ref size_t index)
 {
-    return betweenBalanced(tokens, index, TokenType.lParen, TokenType.rParen);
+    return betweenBalanced(tokens, index, TokenType.LParen, TokenType.RParen);
 }
 
 
@@ -69,20 +69,27 @@ const(Token)[] betweenBalancedParens(const Token[] tokens, ref size_t index)
 */
 const(Token)[] betweenBalancedBrackets(const Token[] tokens, ref size_t index)
 {
-    return betweenBalanced(tokens, index, TokenType.lBracket, TokenType.rBracket);
+    return betweenBalanced(tokens, index, TokenType.LBracket, TokenType.RBracket);
 }
 
-void skipBalanced(alias Op, alias Cl)(const Token[] tokens, ref size_t index)
+/**
+ * If tokens[index] is currently openToken, advances index until it refers to a
+ * location in tokens directly after the balanced occurrence of closeToken. If
+ * tokens[index] is closeToken, decrements index.
+ */
+void skipBalanced(alias openToken, alias closeToken)(const Token[] tokens, ref size_t index)
 {
-    int depth = tokens[index] == Op ? 1 : -1;
+    int depth = tokens[index] == openToken ? 1 : -1;
     int deltaIndex = depth;
     index += deltaIndex;
     for (; index < tokens.length && index > 0 && depth != 0; index += deltaIndex)
     {
         switch (tokens[index].type)
         {
-        case Op: ++depth; break;
-        case Cl: --depth; break;
+        case openToken: ++depth; break;
+        case closeToken: --depth; break;
         default: break;
         }
     }
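
A behavior sketch for the renamed template parameters (hypothetical token stream; not part of the commit):

    // Assume `tokens` was lexed from "(a, (b), c) rest".
    size_t i = 0;
    skipBalanced!(TokenType.LParen, TokenType.RParen)(tokens, i);
    // i now points just past the matching RParen, i.e. at "rest".
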
@@ -90,12 +97,17 @@ void skipBalanced(alias Op, alias Cl)(const Token[] tokens, ref size_t index)
 
 void skipParens(const Token[] tokens, ref size_t index)
 {
-    skipBalanced!(TokenType.lParen, TokenType.rParen)(tokens, index);
+    skipBalanced!(TokenType.LParen, TokenType.RParen)(tokens, index);
 }
 
 void skipBrackets(const Token[] tokens, ref size_t index)
 {
-    skipBalanced!(TokenType.lBracket, TokenType.rBracket)(tokens, index);
+    skipBalanced!(TokenType.LBracket, TokenType.RBracket)(tokens, index);
+}
+
+void skipBraces(const Token[] tokens, ref size_t index)
+{
+    skipBalanced!(TokenType.LBrace, TokenType.RBrace)(tokens, index);
 }
 
 /**
@@ -122,7 +134,7 @@ body
     {
         if (tokens[index] == open) ++depth;
         else if (tokens[index] == close) --depth;
-        else if (tokens[index] == TokenType.comma)
+        else if (tokens[index] == TokenType.Comma)
         {
             app.put(", ");
         }
@@ -139,7 +151,7 @@ body
 */
 string parenContent(const Token[]tokens, ref size_t index)
 {
-    return "(" ~ content(tokens, index, TokenType.lParen, TokenType.rParen) ~ ")";
+    return "(" ~ content(tokens, index, TokenType.LParen, TokenType.RParen) ~ ")";
 }
 
 
@@ -148,7 +160,7 @@ string parenContent(const Token[]tokens, ref size_t index)
 */
 string bracketContent(const Token[]tokens, ref size_t index)
 {
-    return "[" ~ content(tokens, index, TokenType.lBracket, TokenType.rBracket) ~ "]";
+    return "[" ~ content(tokens, index, TokenType.LBracket, TokenType.RBracket) ~ "]";
 }
 
 
@@ -159,11 +171,11 @@ string bracketContent(const Token[]tokens, ref size_t index)
 */
 void skipBlockStatement(const Token[] tokens, ref size_t index)
 {
-    if (tokens[index] == TokenType.lBrace)
+    if (tokens[index] == TokenType.LBrace)
         betweenBalancedBraces(tokens, index);
     else
     {
-        skipPastNext(tokens, TokenType.semicolon, index);
+        skipPastNext(tokens, TokenType.Semicolon, index);
     }
 }
 
@@ -177,11 +189,11 @@ void skipPastNext(const Token[] tokens, TokenType type, ref size_t index)
 {
     while (index < tokens.length)
     {
-        if (tokens[index].type == TokenType.lBrace)
+        if (tokens[index].type == TokenType.LBrace)
             betweenBalancedBraces(tokens, index);
-        else if (tokens[index].type == TokenType.lParen)
+        else if (tokens[index].type == TokenType.LParen)
             betweenBalancedParens(tokens, index);
-        else if (tokens[index].type == TokenType.lBracket)
+        else if (tokens[index].type == TokenType.LBracket)
             betweenBalancedBrackets(tokens, index);
         else if (tokens[index].type == type)
         {
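
A behavior sketch for skipPastNext (hypothetical token stream; not part of the commit):

    // Assume `tokens` was lexed from "{ a; } b;".
    size_t i = 0;
    skipPastNext(tokens, TokenType.Semicolon, i);
    // The balanced { } group is skipped whole, so the semicolon inside it is
    // ignored; i ends up just past the semicolon that follows `b`.
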
@@ -200,18 +212,18 @@ string parseTypeDeclaration(const Token[] tokens, ref size_t index)
 {
     switch (tokens[index].type)
     {
-    case TokenType.lBracket:
+    case TokenType.LBracket:
         type ~= bracketContent(tokens, index);
         break;
-    case TokenType.not:
+    case TokenType.Not:
         type ~= tokens[index++].value;
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
             type ~= parenContent(tokens, index);
         else
             type ~= tokens[index++].value;
         break;
-    case TokenType.star:
-    case TokenType.bitAnd:
+    case TokenType.Star:
+    case TokenType.BitAnd:
         type ~= tokens[index++].value;
         break;
     default:
@@ -249,72 +261,72 @@ Module parseModule(const Token[] tokens, string protection = "public", string[]
 {
     switch(tokens[index].type)
     {
-    case TokenType.tElse:
-    case TokenType.tMixin:
-    case TokenType.tAssert:
+    case TokenType.Else:
+    case TokenType.Mixin:
+    case TokenType.Assert:
         ++index;
         tokens.skipBlockStatement(index);
         break;
-    case TokenType.tAlias:
+    case TokenType.Alias:
         tokens.skipBlockStatement(index);
         break;
-    case TokenType.tImport:
+    case TokenType.Import:
         mod.imports ~= parseImports(tokens, index);
         resetLocals();
         break;
-    case TokenType.tVersion:
+    case TokenType.Version:
         ++index;
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
         {
             tokens.betweenBalancedParens(index);
-            if (tokens[index] == TokenType.lBrace)
+            if (tokens[index] == TokenType.LBrace)
                 mod.merge(parseModule(betweenBalancedBraces(tokens, index),
                     localProtection.empty() ? protection : localProtection,
                     attributes));
         }
-        else if (tokens[index] == TokenType.assign)
+        else if (tokens[index] == TokenType.Assign)
             tokens.skipBlockStatement(index);
         break;
-    case TokenType.tDeprecated:
-    case TokenType.tNothrow:
-    case TokenType.tOverride:
-    case TokenType.tSynchronized:
-    case TokenType.atDisable:
-    case TokenType.atProperty:
-    case TokenType.atSafe:
-    case TokenType.atSystem:
-    case TokenType.tAbstract:
-    case TokenType.tFinal:
-    case TokenType.t__gshared:
-    case TokenType.tStatic:
+    case TokenType.Deprecated:
+    case TokenType.Nothrow:
+    case TokenType.Override:
+    case TokenType.Synchronized:
+    case TokenType.AtDisable:
+    case TokenType.AtProperty:
+    case TokenType.AtSafe:
+    case TokenType.AtSystem:
+    case TokenType.Abstract:
+    case TokenType.Final:
+    case TokenType.Gshared:
+    case TokenType.Static:
         localAttributes ~= tokens[index++].value;
         break;
-    case TokenType.tConst:
-    case TokenType.tImmutable:
-    case TokenType.tInout:
-    case TokenType.tPure:
-    case TokenType.tScope:
-    case TokenType.tShared:
+    case TokenType.Const:
+    case TokenType.Immutable:
+    case TokenType.Inout:
+    case TokenType.Pure:
+    case TokenType.Scope:
+    case TokenType.Shared:
         auto tmp = tokens[index++].value;
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
             type = tmp ~ parenContent(tokens, index);
-        else if (tokens[index] == TokenType.colon)
+        else if (tokens[index] == TokenType.Colon)
         {
             index++;
             attributes ~= tmp;
         }
         localAttributes ~= tmp;
         break;
-    case TokenType.tAlign:
-    case TokenType.tExtern:
+    case TokenType.Align:
+    case TokenType.Extern:
         string attribute = tokens[index++].value;
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
             attribute ~= parenContent(tokens, index);
-        if (tokens[index] == TokenType.lBrace)
+        if (tokens[index] == TokenType.LBrace)
             mod.merge(parseModule(betweenBalancedBraces(tokens, index),
                 localProtection.empty() ? protection : localProtection,
                 attributes ~ attribute));
-        else if (tokens[index] == TokenType.colon)
+        else if (tokens[index] == TokenType.Colon)
         {
             ++index;
             attributes ~= attribute;
@@ -324,66 +336,66 @@ Module parseModule(const Token[] tokens, string protection = "public", string[]
         break;
     case TokenType.PROTECTION_BEGIN: .. case TokenType.PROTECTION_END:
         string p = tokens[index++].value;
-        if (tokens[index] == TokenType.colon)
+        if (tokens[index] == TokenType.Colon)
         {
             protection = p;
             ++index;
         }
-        else if (tokens[index] == TokenType.lBrace)
+        else if (tokens[index] == TokenType.LBrace)
             mod.merge(parseModule(betweenBalancedBraces(tokens, index),
                 p, attributes ~ localAttributes));
         else
             localProtection = p;
         break;
-    case TokenType.tModule:
+    case TokenType.Module:
         ++index;
-        while (index < tokens.length && tokens[index] != TokenType.semicolon)
+        while (index < tokens.length && tokens[index] != TokenType.Semicolon)
             mod.name ~= tokens[index++].value;
         ++index;
         resetLocals();
         break;
-    case TokenType.tUnion:
+    case TokenType.Union:
         mod.unions ~= parseUnion(tokens, index,
             localProtection.empty() ? protection : localProtection,
             localAttributes ~ attributes);
         resetLocals();
         break;
-    case TokenType.tClass:
+    case TokenType.Class:
         mod.classes ~= parseClass(tokens, index,
             localProtection.empty() ? protection : localProtection,
             localAttributes ~ attributes);
         resetLocals();
         break;
-    case TokenType.tInterface:
+    case TokenType.Interface:
         mod.interfaces ~= parseInterface(tokens, index,
             localProtection.empty() ? protection : localProtection,
             localAttributes ~ attributes);
         resetLocals();
         break;
-    case TokenType.tStruct:
+    case TokenType.Struct:
         mod.structs ~= parseStruct(tokens, index,
             localProtection.empty() ? protection : localProtection,
             localAttributes ~ attributes);
         resetLocals();
         break;
-    case TokenType.tEnum:
+    case TokenType.Enum:
         mod.enums ~= parseEnum(tokens, index,
             localProtection.empty() ? protection : localProtection,
             localAttributes ~ attributes);
         resetLocals();
         break;
-    case TokenType.tTemplate:
+    case TokenType.Template:
         ++index; // template
         ++index; // name
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
             tokens.betweenBalancedParens(index); // params
-        if (tokens[index] == TokenType.lBrace)
+        if (tokens[index] == TokenType.LBrace)
             tokens.betweenBalancedBraces(index); // body
         resetLocals();
         break;
     case TokenType.TYPES_BEGIN: .. case TokenType.TYPES_END:
-    case TokenType.tAuto:
-    case TokenType.identifier:
+    case TokenType.Auto:
+    case TokenType.Identifier:
         if (type.empty())
         {
             type = tokens.parseTypeDeclaration(index);
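
For reference, the three protection forms the PROTECTION_BEGIN .. PROTECTION_END branch distinguishes correspond to this D surface syntax (illustration only, not code from the patch):

    private:             // colon form: applies until the end of the scope
    public { void f(); } // brace form: applies to the braced group only
    protected void g();  // prefix form: applies to the next declaration only
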
@@ -392,7 +404,7 @@ Module parseModule(const Token[] tokens, string protection = "public", string[]
         {
             name = tokens[index++].value;
             if (index >= tokens.length) break;
-            if (tokens[index] == TokenType.lParen)
+            if (tokens[index] == TokenType.LParen)
             {
                 mod.functions ~= parseFunction(tokens, index, type, name,
                     tokens[index].lineNumber,
@@ -412,23 +424,23 @@ Module parseModule(const Token[] tokens, string protection = "public", string[]
             resetLocals();
         }
         break;
-    case TokenType.tUnittest:
+    case TokenType.Unittest:
         ++index;
-        if (!tokens.empty() && tokens[index] == TokenType.lBrace)
+        if (!tokens.empty() && tokens[index] == TokenType.LBrace)
             tokens.skipBlockStatement(index);
         resetLocals();
         break;
-    case TokenType.tilde:
+    case TokenType.Tilde:
         ++index;
-        if (tokens[index] == TokenType.tThis)
+        if (tokens[index] == TokenType.This)
         {
             name = "~";
             goto case;
         }
         break;
-    case TokenType.tThis:
+    case TokenType.This:
         name ~= tokens[index++].value;
-        if (tokens[index] == TokenType.lParen)
+        if (tokens[index] == TokenType.LParen)
         {
             mod.functions ~= parseFunction(tokens, index, "", name,
                 tokens[index - 1].lineNumber,
@@ -453,7 +465,7 @@ Module parseModule(const Token[] tokens, string protection = "public", string[]
 */
 string[] parseImports(const Token[] tokens, ref size_t index)
 {
-    assert(tokens[index] == TokenType.tImport);
+    assert(tokens[index] == TokenType.Import);
     ++index;
     auto app = appender!(string[])();
     string im;
@@ -461,17 +473,17 @@ string[] parseImports(const Token[] tokens, ref size_t index)
     {
         switch(tokens[index].type)
         {
-        case TokenType.comma:
+        case TokenType.Comma:
             ++index;
             app.put(im);
             im = "";
             break;
-        case TokenType.assign:
-        case TokenType.semicolon:
+        case TokenType.Assign:
+        case TokenType.Semicolon:
             app.put(im);
             ++index;
             return app.data;
-        case TokenType.colon:
+        case TokenType.Colon:
             app.put(im);
             tokens.skipBlockStatement(index);
             return app.data;
@@ -491,7 +503,7 @@ Enum parseEnum(const Token[] tokens, ref size_t index, string protection,
     string[] attributes)
 in
 {
-    assert (tokens[index] == TokenType.tEnum);
+    assert (tokens[index] == TokenType.Enum);
 }
 body
 {
@@ -500,7 +512,7 @@ body
     e.line = tokens[index].lineNumber;
     e.name = tokens[index++].value;
 
-    if (tokens[index] == TokenType.colon)
+    if (tokens[index] == TokenType.Colon)
     {
         ++index;
         e.type = tokens[index++].value;
@@ -508,7 +520,7 @@ body
     else
         e.type = "uint";
 
-    if (tokens[index] != TokenType.lBrace)
+    if (tokens[index] != TokenType.LBrace)
     {
         tokens.skipBlockStatement(index);
         return e;
@@ -517,13 +529,13 @@ body
     auto r = betweenBalancedBraces(tokens, index);
     for (size_t i = 0; i < r.length;)
     {
-        if (r[i].type == TokenType.identifier)
+        if (r[i].type == TokenType.Identifier)
         {
             EnumMember member;
             member.line = r[i].lineNumber;
             member.name = r[i].value;
             e.members ~= member;
-            r.skipPastNext(TokenType.comma, i);
+            r.skipPastNext(TokenType.Comma, i);
         }
         else
             ++i;
@@ -539,7 +551,7 @@ Function parseFunction(const Token[] tokens, ref size_t index, string type,
     string name, uint line, string protection, string[] attributes)
 in
 {
-    assert (tokens[index] == TokenType.lParen);
+    assert (tokens[index] == TokenType.LParen);
 }
 body
 {
@@ -550,7 +562,7 @@ body
     f.attributes.insertInPlace(f.attributes.length, attributes);
 
     Variable[] vars1 = parseParameters(tokens, index);
-    if (tokens[index] == TokenType.lParen)
+    if (tokens[index] == TokenType.LParen)
     {
         f.templateParameters.insertInPlace(f.templateParameters.length,
             map!("a.type")(vars1));
@@ -564,14 +576,14 @@ body
     {
         switch (tokens[index].type)
         {
-        case TokenType.tImmutable:
-        case TokenType.tConst:
-        case TokenType.tPure:
-        case TokenType.atTrusted:
-        case TokenType.atProperty:
-        case TokenType.tNothrow:
-        case TokenType.tFinal:
-        case TokenType.tOverride:
+        case TokenType.Immutable:
+        case TokenType.Const:
+        case TokenType.Pure:
+        case TokenType.AtTrusted:
+        case TokenType.AtProperty:
+        case TokenType.Nothrow:
+        case TokenType.Final:
+        case TokenType.Override:
             f.attributes ~= tokens[index++].value;
             break;
         default:
@@ -579,21 +591,21 @@ body
         }
     }
 
-    if (tokens[index] == TokenType.tIf)
+    if (tokens[index] == TokenType.If)
         f.constraint = parseConstraint(tokens, index);
     while (index < tokens.length &&
-        (tokens[index] == TokenType.tIn || tokens[index] == TokenType.tOut
-        || tokens[index] == TokenType.tBody))
+        (tokens[index] == TokenType.In || tokens[index] == TokenType.Out
+        || tokens[index] == TokenType.Body))
     {
         ++index;
-        if (index < tokens.length && tokens[index] == TokenType.lBrace)
+        if (index < tokens.length && tokens[index] == TokenType.LBrace)
             tokens.skipBlockStatement(index);
     }
     if (index >= tokens.length)
         return f;
-    if (tokens[index] == TokenType.lBrace)
+    if (tokens[index] == TokenType.LBrace)
         tokens.skipBlockStatement(index);
-    else if (tokens[index] == TokenType.semicolon)
+    else if (tokens[index] == TokenType.Semicolon)
         ++index;
     return f;
 }
@@ -601,16 +613,16 @@ body
 string parseConstraint(const Token[] tokens, ref size_t index)
 {
     auto appender = appender!(string)();
-    assert(tokens[index] == TokenType.tIf);
+    assert(tokens[index] == TokenType.If);
     appender.put(tokens[index++].value);
-    assert(tokens[index] == TokenType.lParen);
+    assert(tokens[index] == TokenType.LParen);
     return "if " ~ parenContent(tokens, index);
 }
 
 Variable[] parseParameters(const Token[] tokens, ref size_t index)
 in
 {
-    assert (tokens[index] == TokenType.lParen);
+    assert (tokens[index] == TokenType.LParen);
 }
 body
 {
@@ -622,28 +634,28 @@ body
     {
         switch(r[i].type)
         {
-        case TokenType.tIn:
-        case TokenType.tOut:
-        case TokenType.tRef:
-        case TokenType.tScope:
-        case TokenType.tLazy:
-        case TokenType.tConst:
-        case TokenType.tImmutable:
-        case TokenType.tShared:
-        case TokenType.tInout:
+        case TokenType.In:
+        case TokenType.Out:
+        case TokenType.Ref:
+        case TokenType.Scope:
+        case TokenType.Lazy:
+        case TokenType.Const:
+        case TokenType.Immutable:
+        case TokenType.Shared:
+        case TokenType.Inout:
             auto tmp = r[i++].value;
-            if (r[i] == TokenType.lParen)
+            if (r[i] == TokenType.LParen)
                 v.type ~= tmp ~ parenContent(r, i);
             else
                 v.attributes ~= tmp;
             break;
-        case TokenType.colon:
+        case TokenType.Colon:
             i++;
-            r.skipPastNext(TokenType.comma, i);
+            r.skipPastNext(TokenType.Comma, i);
             appender.put(v);
             v = new Variable;
             break;
-        case TokenType.comma:
+        case TokenType.Comma:
             ++i;
             appender.put(v);
             v = new Variable;
@@ -660,12 +672,12 @@ body
                 v.line = r[i].lineNumber;
                 v.name = r[i++].value;
                 appender.put(v);
-                if (i < r.length && r[i] == TokenType.vararg)
+                if (i < r.length && r[i] == TokenType.Vararg)
                 {
                     v.type ~= " ...";
                 }
                 v = new Variable;
-                r.skipPastNext(TokenType.comma, i);
+                r.skipPastNext(TokenType.Comma, i);
             }
             break;
         }
@@ -676,7 +688,7 @@ body
 string[] parseBaseClassList(const Token[] tokens, ref size_t index)
 in
 {
-    assert(tokens[index] == TokenType.colon);
+    assert(tokens[index] == TokenType.Colon);
 }
 body
 {
@@ -684,11 +696,11 @@ body
     ++index;
     while (index < tokens.length)
     {
-        if (tokens[index] == TokenType.identifier)
+        if (tokens[index] == TokenType.Identifier)
         {
             string base = parseTypeDeclaration(tokens, index);
             appender.put(base);
-            if (tokens[index] == TokenType.comma)
+            if (tokens[index] == TokenType.Comma)
                 ++index;
             else
                 break;
@@ -717,18 +729,18 @@ Struct parseStructOrUnion(const Token[] tokens, ref size_t index, string protect
 	s.attributes = attributes;
 	s.protection = protection;
 	s.name = tokens[index++].value;
-	if (tokens[index] == TokenType.lParen)
+	if (tokens[index] == TokenType.LParen)
 		s.templateParameters.insertInPlace(s.templateParameters.length,
 			map!("a.type")(parseParameters(tokens, index)));
 
 	if (index >= tokens.length) return s;
 
-	if (tokens[index] == TokenType.tIf)
+	if (tokens[index] == TokenType.If)
 		s.constraint = parseConstraint(tokens, index);
 
 	if (index >= tokens.length) return s;
 
-	if (tokens[index] == TokenType.lBrace)
+	if (tokens[index] == TokenType.LBrace)
 		parseStructBody(tokens, index, s);
 	else
 		tokens.skipBlockStatement(index);
@@ -739,7 +751,7 @@ Struct parseStruct(const Token[] tokens, ref size_t index, string protection,
 	string[] attributes)
 in
 {
-	assert(tokens[index] == TokenType.tStruct);
+	assert(tokens[index] == TokenType.Struct);
 }
 body
 {
@@ -750,7 +762,7 @@ Struct parseUnion(const Token[] tokens, ref size_t index, string protection,
 	string[] attributes)
 in
 {
-	assert(tokens[index] == TokenType.tUnion);
+	assert(tokens[index] == TokenType.Union);
 }
 body
 {
@@ -765,23 +777,23 @@ Inherits parseInherits(const Token[] tokens, ref size_t index, string protection
 	i.name = tokens[index++].value;
 	i.protection = protection;
 	i.attributes.insertInPlace(i.attributes.length, attributes);
-	if (tokens[index] == TokenType.lParen)
+	if (tokens[index] == TokenType.LParen)
 		i.templateParameters.insertInPlace(i.templateParameters.length,
 			map!("a.type")(parseParameters(tokens, index)));
 
 	if (index >= tokens.length) return i;
 
-	if (tokens[index] == TokenType.tIf)
+	if (tokens[index] == TokenType.If)
 		i.constraint = parseConstraint(tokens, index);
 
 	if (index >= tokens.length) return i;
 
-	if (tokens[index] == TokenType.colon)
+	if (tokens[index] == TokenType.Colon)
 		i.baseClasses = parseBaseClassList(tokens, index);
 
 	if (index >= tokens.length) return i;
 
-	if (tokens[index] == TokenType.lBrace)
+	if (tokens[index] == TokenType.LBrace)
 		parseStructBody(tokens, index, i);
 	else
 		tokens.skipBlockStatement(index);
@@ -792,7 +804,7 @@ Inherits parseInterface(const Token[] tokens, ref size_t index, string protectio
 	string[] attributes)
 in
 {
-	assert (tokens[index] == TokenType.tInterface);
+	assert (tokens[index] == TokenType.Interface);
 }
 body
 {
@@ -804,7 +816,7 @@ Inherits parseClass(const Token[] tokens, ref size_t index, string protection,
 	string[] attributes)
 in
 {
-	assert(tokens[index] == TokenType.tClass);
+	assert(tokens[index] == TokenType.Class);
 }
 body
 {
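
Note: parseStructOrUnion above checks for a template parameter list, a constraint, and a body in that order. A declaration that exercises every branch, for illustration only:

// Each guarded step in parseStructOrUnion maps to one piece of this input:
struct S(T, U)        // LParen after the name: template parameters
	if (is(T == int)) // If: template constraint
{                     // LBrace: struct body
	T value;
}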
tokenizer.d (285 changed lines)
@@ -29,7 +29,7 @@ import codegen;
  * Returns: The whitespace, or null if style was CODE_ONLY
  */
 pure nothrow string lexWhitespace(S)(S inputString, ref size_t endIndex,
-	ref uint lineNumber, IterationStyle style = IterationStyle.CODE_ONLY) // I suggest to remove the last param
+	ref uint lineNumber)
 	if (isSomeString!S)
 {
 	immutable startIndex = endIndex;
@@ -39,17 +39,11 @@ pure nothrow string lexWhitespace(S)(S inputString, ref size_t endIndex,
 		lineNumber++;
 		++endIndex;
 	}
-	final switch (style)
-	{
-	case IterationStyle.EVERYTHING:
-		return inputString[startIndex .. endIndex];
-	case IterationStyle.CODE_ONLY:
-		return null;
-	}
+	return inputString[startIndex .. endIndex];
 }
 
 /**
- * If inputString starts from #!, increments endIndex until it indexes the next line.
+ * If inputString starts with #!, increments endIndex until it indexes the next line.
  * Params:
  * inputString = the source code to examine
  * endIndex = an index into inputString
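
Note: with the style parameter gone, lexWhitespace always returns the slice and the caller decides what to keep. A sketch of the caller side, matching the tokenize hunk later in this diff; the else branch is my assumption about the skip path, not part of this commit:

if (iterationStyle == IterationStyle.EVERYTHING)
{
	currentToken.lineNumber = lineNumber;
	currentToken.value = lexWhitespace(inputString, endIndex, lineNumber);
	currentToken.type = TokenType.Whitespace;
	tokenAppender.put(currentToken);
}
else
	lexWhitespace(inputString, endIndex, lineNumber); // advance past it, discard the slice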
@ -139,7 +133,6 @@ pure nothrow string lexComment(S)(ref S inputString, ref size_t endIndex,
|
||||||
* quote = the opening (and closing) quote character for the string to be
|
* quote = the opening (and closing) quote character for the string to be
|
||||||
* lexed
|
* lexed
|
||||||
* Returns: a string literal, including its opening and closing quote characters
|
* Returns: a string literal, including its opening and closing quote characters
|
||||||
* Bugs: Does not handle string suffixes
|
|
||||||
*/
|
*/
|
||||||
pure nothrow string lexString(S, C)(S inputString, ref size_t endIndex, ref uint lineNumber,
|
pure nothrow string lexString(S, C)(S inputString, ref size_t endIndex, ref uint lineNumber,
|
||||||
C quote, bool canEscape = true) if (isSomeString!S && isSomeChar!C)
|
C quote, bool canEscape = true) if (isSomeString!S && isSomeChar!C)
|
||||||
|
|
@ -258,7 +251,7 @@ pure nothrow Token lexNumber(S)(ref S inputString, ref size_t endIndex)
|
||||||
endIndex++;
|
endIndex++;
|
||||||
if (isEoF(inputString, endIndex))
|
if (isEoF(inputString, endIndex))
|
||||||
{
|
{
|
||||||
token.type = TokenType.intLiteral;
|
token.type = TokenType.IntLiteral;
|
||||||
token.value = inputString[startIndex .. endIndex];
|
token.value = inputString[startIndex .. endIndex];
|
||||||
return token;
|
return token;
|
||||||
}
|
}
|
||||||
|
|
@ -278,7 +271,7 @@ pure nothrow Token lexNumber(S)(ref S inputString, ref size_t endIndex)
|
||||||
lexHex(inputString, startIndex, ++endIndex, token);
|
lexHex(inputString, startIndex, ++endIndex, token);
|
||||||
return token;
|
return token;
|
||||||
default:
|
default:
|
||||||
token.type = TokenType.intLiteral;
|
token.type = TokenType.IntLiteral;
|
||||||
token.value = inputString[startIndex .. endIndex];
|
token.value = inputString[startIndex .. endIndex];
|
||||||
return token;
|
return token;
|
||||||
}
|
}
|
||||||
|
|
@ -296,7 +289,7 @@ pure nothrow void lexBinary(S)(ref S inputString, size_t startIndex,
|
||||||
bool lexingSuffix = false;
|
bool lexingSuffix = false;
|
||||||
bool isLong = false;
|
bool isLong = false;
|
||||||
bool isUnsigned = false;
|
bool isUnsigned = false;
|
||||||
token.type = TokenType.intLiteral;
|
token.type = TokenType.IntLiteral;
|
||||||
binaryLoop: while (!isEoF(inputString, endIndex))
|
binaryLoop: while (!isEoF(inputString, endIndex))
|
||||||
{
|
{
|
||||||
switch (inputString[endIndex])
|
switch (inputString[endIndex])
|
||||||
|
|
@ -316,11 +309,11 @@ pure nothrow void lexBinary(S)(ref S inputString, size_t startIndex,
|
||||||
lexingSuffix = true;
|
lexingSuffix = true;
|
||||||
if (isLong)
|
if (isLong)
|
||||||
{
|
{
|
||||||
token.type = TokenType.unsignedLongLiteral;
|
token.type = TokenType.UnsignedLongLiteral;
|
||||||
break binaryLoop;
|
break binaryLoop;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
token.type = TokenType.unsignedIntLiteral;
|
token.type = TokenType.UnsignedIntLiteral;
|
||||||
isUnsigned = true;
|
isUnsigned = true;
|
||||||
break;
|
break;
|
||||||
case 'L':
|
case 'L':
|
||||||
|
|
@ -330,11 +323,11 @@ pure nothrow void lexBinary(S)(ref S inputString, size_t startIndex,
|
||||||
lexingSuffix = true;
|
lexingSuffix = true;
|
||||||
if (isUnsigned)
|
if (isUnsigned)
|
||||||
{
|
{
|
||||||
token.type = TokenType.unsignedLongLiteral;
|
token.type = TokenType.UnsignedLongLiteral;
|
||||||
break binaryLoop;
|
break binaryLoop;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
token.type = TokenType.longLiteral;
|
token.type = TokenType.LongLiteral;
|
||||||
isLong = true;
|
isLong = true;
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
|
|
@ -357,7 +350,7 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
|
||||||
bool foundDot = false;
|
bool foundDot = false;
|
||||||
bool foundE = false;
|
bool foundE = false;
|
||||||
bool foundPlusMinus = false;
|
bool foundPlusMinus = false;
|
||||||
token.type = TokenType.intLiteral;
|
token.type = TokenType.IntLiteral;
|
||||||
decimalLoop: while (!isEoF(inputString, endIndex))
|
decimalLoop: while (!isEoF(inputString, endIndex))
|
||||||
{
|
{
|
||||||
switch (inputString[endIndex])
|
switch (inputString[endIndex])
|
||||||
|
|
@@ -370,10 +363,30 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
 			break;
 		case 'e':
 		case 'E':
-			if (foundE)
+			// For this to be a valid exponent, the next character must be a
+			// decimal character or a sign
+			if (foundE || isEoF(inputString, endIndex + 1))
 				break decimalLoop;
+			switch (inputString[endIndex + 1])
+			{
+			case '+':
+			case '-':
+				if (isEoF(inputString, endIndex + 2)
+					|| inputString[endIndex + 2] < '0'
+					|| inputString[endIndex + 2] > '9')
+				{
+					break decimalLoop;
+				}
+				break;
+			case '0': .. case '9':
+				break;
+			default:
+				break decimalLoop;
+			}
 			++endIndex;
 			foundE = true;
+			isDouble = true;
+			token.type = TokenType.DoubleLiteral;
 			break;
 		case '+':
 		case '-':
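
Note: the lookahead above only treats 'e' or 'E' as an exponent when the next character is a digit, or a sign followed by a digit. The same rule as a standalone helper; this function is hypothetical and only restates the logic for clarity:

bool isExponentStart(string s, size_t i) // s[i] is assumed to be 'e' or 'E'
{
	if (i + 1 >= s.length) return false;
	immutable char next = s[i + 1];
	if (next >= '0' && next <= '9') return true;
	if ((next == '+' || next == '-') && i + 2 < s.length)
		return s[i + 2] >= '0' && s[i + 2] <= '9';
	return false;
}

unittest {
	assert(isExponentStart("55e-4", 2));
	assert(!isExponentStart("3e+f", 1));  // sign without a following digit
	assert(!isExponentStart("3e++f", 1));
}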
@@ -389,7 +402,7 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
 				break decimalLoop; // two dots with other characters between them
 			++endIndex;
 			foundDot = true;
-			token.type = TokenType.doubleLiteral;
+			token.type = TokenType.DoubleLiteral;
 			isDouble = true;
 			break;
 		case 'u':
@@ -399,9 +412,9 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
 			++endIndex;
 			lexingSuffix = true;
 			if (isLong)
-				token.type = TokenType.unsignedLongLiteral;
+				token.type = TokenType.UnsignedLongLiteral;
 			else
-				token.type = TokenType.unsignedIntLiteral;
+				token.type = TokenType.UnsignedIntLiteral;
 			isUnsigned = true;
 			break;
 		case 'L':
@@ -412,11 +425,11 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
 			++endIndex;
 			lexingSuffix = true;
 			if (isDouble)
-				token.type = TokenType.realLiteral;
+				token.type = TokenType.RealLiteral;
 			else if (isUnsigned)
-				token.type = TokenType.unsignedLongLiteral;
+				token.type = TokenType.UnsignedLongLiteral;
 			else
-				token.type = TokenType.longLiteral;
+				token.type = TokenType.LongLiteral;
 			isLong = true;
 			break;
 		case 'f':
@@ -425,40 +438,70 @@ pure nothrow void lexDecimal(S)(ref S inputString, size_t startIndex,
 			if (isUnsigned || isLong)
 				break decimalLoop;
 			++endIndex;
-			token.type = TokenType.floatLiteral;
+			token.type = TokenType.FloatLiteral;
 			break decimalLoop;
+		case 'i':
+			++endIndex;
+			// Spec says that this is the last suffix, so all cases break the
+			// loop.
+			if (isDouble)
+			{
+				token.type = TokenType.Idouble;
+				break decimalLoop;
+			}
+			else if (isFloat)
+			{
+				token.type = TokenType.Ifloat;
+				break decimalLoop;
+			}
+			else if (isReal)
+			{
+				token.type = TokenType.Ireal;
+				break decimalLoop;
+			}
+			else
+			{
+				// There is no imaginary int
+				--endIndex;
+				break decimalLoop;
+			}
 		default:
 			break decimalLoop;
 		}
 	}
 
-	// suggest to extract lexing integers into a separate function
-	// please see unittest below
 
 	token.value = inputString[startIndex .. endIndex];
 }
 
 
 unittest {
-	dump!lexDecimal("55e-4"); // yeilds intLiteral, but should be float
-	dump!lexDecimal("3e+f"); // floatLiteral, but should be considered invalid
-	dump!lexDecimal("3e++f"); // intLiteral 3e+, but should be considered invalid
-	// actually, there are lots of bugs. The point is that without decomposition of integer lexing from floating-point lexing
-	// it is very hard to prove algorithm correctness
+	Token t;
+	size_t start, end;
+	lexDecimal!string("55e-4", start, end, t);
+	assert(t.value == "55e-4");
+	assert(t.type == TokenType.DoubleLiteral);
+
+	start = end = 0;
+	lexDecimal!string("123.45f", start, end, t);
+	assert(t.value == "123.45f");
+	assert(t.type == TokenType.FloatLiteral);
+
+	start = end = 0;
+	lexDecimal!string("3e+f", start, end, t);
+	assert(t.value == "3");
+	assert(t.type == TokenType.IntLiteral);
+
+	start = end = 0;
+	lexDecimal!string("3e++f", start, end, t);
+	assert(t.value == "3");
+	assert(t.type == TokenType.IntLiteral);
+
+	start = end = 0;
+	lexDecimal!string("1234..1237", start, end, t);
+	assert(t.value == "1234");
+	assert(t.type == TokenType.IntLiteral);
 }
 
-// Temporary function to illustrate some problems
-// Executes T and dumps results to console
-void dump(alias T)(string s) {
-	size_t start;
-	size_t end;
-	Token tok;
-	T!(string)(s, start, end, tok);
-	// dump results
-	writeln(tok.type);
-	writeln(tok.value);
-	writeln(start);
-	writeln(end);
-}
 
 nothrow void lexHex(S)(ref S inputString, ref size_t startIndex,
 	ref size_t endIndex, ref Token token) if (isSomeString!S)
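
Note: a suggested extra unittest case for the new 'i' suffix branch, not part of this commit; it assumes isFloat and isReal are tracked the same way isDouble is.

unittest {
	Token t;
	size_t start, end;
	lexDecimal!string("1.23i", start, end, t); // double literal plus the 'i' suffix
	assert(t.value == "1.23i");
	assert(t.type == TokenType.Idouble);

	start = end = 0;
	lexDecimal!string("42i", start, end, t); // no imaginary int: 'i' must not be consumed
	assert(t.value == "42");
	assert(t.type == TokenType.IntLiteral);
}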
@@ -472,7 +515,7 @@ nothrow void lexHex(S)(ref S inputString, ref size_t startIndex,
 	bool foundDot = false;
 	bool foundE = false;
 	bool foundPlusMinus = false;
-	token.type = TokenType.intLiteral;
+	token.type = TokenType.IntLiteral;
 	hexLoop: while (!isEoF(inputString, endIndex))
 	{
 		switch (inputString[endIndex])
@@ -506,7 +549,7 @@ nothrow void lexHex(S)(ref S inputString, ref size_t startIndex,
 				break hexLoop; // two dots with other characters between them
 			++endIndex;
 			foundDot = true;
-			token.type = TokenType.doubleLiteral;
+			token.type = TokenType.DoubleLiteral;
 			isDouble = true;
 			break;
 		default:
@@ -567,7 +610,7 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 		Token currentToken;
 		currentToken.lineNumber = lineNumber; // lineNumber is always 1
 		currentToken.value = lexScriptLine(inputString, endIndex, lineNumber);
-		currentToken.type = TokenType.scriptLine;
+		currentToken.type = TokenType.ScriptLine;
 	}
 
 	while (!isEoF(inputString, endIndex))
@@ -581,8 +624,8 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 		{
 			currentToken.lineNumber = lineNumber;
 			currentToken.value = lexWhitespace(inputString, endIndex,
-				lineNumber, IterationStyle.EVERYTHING); // note: I suggest to remove the last parameter to simplify lexWhitespace
-			currentToken.type = TokenType.whitespace;
+				lineNumber);
+			currentToken.type = TokenType.Whitespace;
 			tokenAppender.put(currentToken);
 		}
 		else
@@ -594,66 +637,66 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 		outerSwitch: switch(inputString[endIndex])
 		{
 		mixin(generateCaseTrie(
-			"=", "TokenType.assign",
-			"&", "TokenType.bitAnd",
-			"&=", "TokenType.bitAndEquals",
-			"|", "TokenType.bitOr",
-			"|=", "TokenType.bitOrEquals",
-			"~=", "TokenType.catEquals",
-			":", "TokenType.colon",
-			",", "TokenType.comma",
-			"$", "TokenType.dollar",
-			".", "TokenType.dot",
-			"==", "TokenType.equals",
-			"=>", "TokenType.goesTo",
-			">", "TokenType.greater",
-			">=", "TokenType.greaterEqual",
-			"#", "TokenType.hash",
-			"&&", "TokenType.lAnd",
-			"{", "TokenType.lBrace",
-			"[", "TokenType.lBracket",
-			"<", "TokenType.less",
-			"<=", "TokenType.lessEqual",
-			"<>=", "TokenType.lessEqualGreater",
-			"<>", "TokenType.lessOrGreater",
-			"||", "TokenType.lOr",
-			"(", "TokenType.lParen",
-			"-", "TokenType.minus",
-			"-=", "TokenType.minusEquals",
-			"%", "TokenType.mod",
-			"%=", "TokenType.modEquals",
-			"*=", "TokenType.mulEquals",
-			"!", "TokenType.not",
-			"!=", "TokenType.notEquals",
-			"!>", "TokenType.notGreater",
-			"!>=", "TokenType.notGreaterEqual",
-			"!<", "TokenType.notLess",
-			"!<=", "TokenType.notLessEqual",
-			"!<>", "TokenType.notLessEqualGreater",
-			"+", "TokenType.plus",
-			"+=", "TokenType.plusEquals",
-			"^^", "TokenType.pow",
-			"^^=", "TokenType.powEquals",
-			"}", "TokenType.rBrace",
-			"]", "TokenType.rBracket",
-			")", "TokenType.rParen",
-			";", "TokenType.semicolon",
-			"<<", "TokenType.shiftLeft",
-			"<<=", "TokenType.shiftLeftEqual",
-			">>", "TokenType.shiftRight",
-			">>=", "TokenType.shiftRightEqual",
-			"..", "TokenType.slice",
-			"*", "TokenType.star",
-			"?", "TokenType.ternary",
-			"~", "TokenType.tilde",
-			"--", "TokenType.uMinus",
-			"!<>=", "TokenType.unordered",
-			">>>", "TokenType.unsignedShiftRight",
-			">>>=", "TokenType.unsignedShiftRightEqual",
-			"++", "TokenType.uPlus",
-			"...", "TokenType.vararg",
-			"^", "TokenType.xor",
-			"^=", "TokenType.xorEquals",
+			"=", "TokenType.Assign",
+			"&", "TokenType.BitAnd",
+			"&=", "TokenType.BitAndEquals",
+			"|", "TokenType.BitOr",
+			"|=", "TokenType.BitOrEquals",
+			"~=", "TokenType.CatEquals",
+			":", "TokenType.Colon",
+			",", "TokenType.Comma",
+			"$", "TokenType.Dollar",
+			".", "TokenType.Dot",
+			"==", "TokenType.Equals",
+			"=>", "TokenType.GoesTo",
+			">", "TokenType.Greater",
+			">=", "TokenType.GreaterEqual",
+			"#", "TokenType.Hash",
+			"&&", "TokenType.LogicAnd",
+			"{", "TokenType.LBrace",
+			"[", "TokenType.LBracket",
+			"<", "TokenType.Less",
+			"<=", "TokenType.LessEqual",
+			"<>=", "TokenType.LessEqualGreater",
+			"<>", "TokenType.LessOrGreater",
+			"||", "TokenType.LogicOr",
+			"(", "TokenType.LParen",
+			"-", "TokenType.Minus",
+			"-=", "TokenType.MinusEquals",
+			"%", "TokenType.Mod",
+			"%=", "TokenType.ModEquals",
+			"*=", "TokenType.MulEquals",
+			"!", "TokenType.Not",
+			"!=", "TokenType.NotEquals",
+			"!>", "TokenType.NotGreater",
+			"!>=", "TokenType.NotGreaterEqual",
+			"!<", "TokenType.NotLess",
+			"!<=", "TokenType.NotLessEqual",
+			"!<>", "TokenType.NotLessEqualGreater",
+			"+", "TokenType.Plus",
+			"+=", "TokenType.PlusEquals",
+			"^^", "TokenType.Pow",
+			"^^=", "TokenType.PowEquals",
+			"}", "TokenType.RBrace",
+			"]", "TokenType.RBracket",
+			")", "TokenType.RParen",
+			";", "TokenType.Semicolon",
+			"<<", "TokenType.ShiftLeft",
+			"<<=", "TokenType.ShiftLeftEqual",
+			">>", "TokenType.ShiftRight",
+			">>=", "TokenType.ShiftRightEqual",
+			"..", "TokenType.Slice",
+			"*", "TokenType.Star",
+			"?", "TokenType.Ternary",
+			"~", "TokenType.Tilde",
+			"--", "TokenType.Decrement",
+			"!<>=", "TokenType.Unordered",
+			">>>", "TokenType.UnsignedShiftRight",
+			">>>=", "TokenType.UnsignedShiftRightEqual",
+			"++", "TokenType.Increment",
+			"...", "TokenType.Vararg",
+			"^", "TokenType.Xor",
+			"^=", "TokenType.XorEquals",
 		));
 		case '0': .. case '9':
 			currentToken = lexNumber(inputString, endIndex);
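
Note: generateCaseTrie (from the codegen import) expands these string/name pairs into nested switch cases at compile time, so the longest matching operator wins without backtracking. Roughly the shape it produces for "=", "==", and "=>"; this is an illustration of the idea, not the literal expansion:

case '=':
	if (!isEoF(inputString, endIndex + 1) && inputString[endIndex + 1] == '=')
	{
		currentToken.value = "==";
		currentToken.type = TokenType.Equals;
		endIndex += 2;
	}
	else if (!isEoF(inputString, endIndex + 1) && inputString[endIndex + 1] == '>')
	{
		currentToken.value = "=>";
		currentToken.type = TokenType.GoesTo;
		endIndex += 2;
	}
	else
	{
		currentToken.value = "=";
		currentToken.type = TokenType.Assign;
		++endIndex;
	}
	break;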
@@ -663,7 +706,7 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 			if (isEoF(inputString, endIndex))
 			{
 				currentToken.value = "/";
-				currentToken.type = TokenType.div;
+				currentToken.type = TokenType.Div;
 				currentToken.lineNumber = lineNumber;
 				break;
 			}
@@ -681,17 +724,17 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 			else
 			{
 				currentToken.value = lexComment(inputString, endIndex, lineNumber);
-				currentToken.type = TokenType.comment;
+				currentToken.type = TokenType.Comment;
 				break;
 			}
 			case '=':
 				currentToken.value = "/=";
-				currentToken.type = TokenType.divEquals;
+				currentToken.type = TokenType.DivEquals;
 				++endIndex;
 				break;
 			default:
 				currentToken.value = "/";
-				currentToken.type = TokenType.div;
+				currentToken.type = TokenType.Div;
 				break;
 			}
 			break;
@@ -702,13 +745,13 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 			currentToken.lineNumber = lineNumber;
 			currentToken.value = lexString(inputString, endIndex,
 				lineNumber, inputString[endIndex], false);
-			currentToken.type = TokenType.stringLiteral;
+			currentToken.type = TokenType.StringLiteral;
 			break;
 		case '`':
 			currentToken.lineNumber = lineNumber;
 			currentToken.value = lexString(inputString, endIndex, lineNumber,
 				inputString[endIndex], false);
-			currentToken.type = TokenType.stringLiteral;
+			currentToken.type = TokenType.StringLiteral;
 			break;
 		case 'x':
 			++endIndex;
@@ -721,7 +764,7 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 			currentToken.lineNumber = lineNumber;
 			currentToken.value = lexString(inputString, endIndex, lineNumber,
 				inputString[endIndex]);
-			currentToken.type = TokenType.stringLiteral;
+			currentToken.type = TokenType.StringLiteral;
 			break;
 		case 'q':
 			currentToken.value = "q";
@@ -734,13 +777,13 @@ Token[] tokenize(S)(S inputString, IterationStyle iterationStyle = IterationStyl
 				currentToken.lineNumber = lineNumber;
 				currentToken.value ~= lexDelimitedString(inputString,
 					endIndex, lineNumber);
-				currentToken.type = TokenType.stringLiteral;
+				currentToken.type = TokenType.StringLiteral;
 				break outerSwitch;
 			case '{':
 				currentToken.lineNumber = lineNumber;
 				currentToken.value ~= lexTokenString(inputString,
 					endIndex, lineNumber);
-				currentToken.type = TokenType.stringLiteral;
+				currentToken.type = TokenType.StringLiteral;
 				break outerSwitch;
 			default:
 				break;