Add 'import' statements
This commit is contained in:
@@ -6,18 +6,16 @@ from smnp.token.tokenizers.bracket import tokenizeOpenBracket, tokenizeCloseBrac
|
||||
from smnp.token.tokenizers.comma import tokenizeComma
|
||||
from smnp.token.tokenizers.comment import tokenizeComment
|
||||
from smnp.token.tokenizers.dot import tokenizeDot
|
||||
from smnp.token.tokenizers.extend import tokenizeExtend
|
||||
from smnp.token.tokenizers.function import tokenizeFunction
|
||||
from smnp.token.tokenizers.identifier import tokenizeIdentifier
|
||||
from smnp.token.tokenizers.integer import tokenizeInteger
|
||||
from smnp.token.tokenizers.keyword import tokenizeType, tokenizeFunction, tokenizeReturn, tokenizeExtend, \
|
||||
tokenizeImport, tokenizeFrom, tokenizeAs
|
||||
from smnp.token.tokenizers.minus import tokenizeMinus
|
||||
from smnp.token.tokenizers.note import tokenizeNote
|
||||
from smnp.token.tokenizers.paren import tokenizeOpenParen, tokenizeCloseParen
|
||||
from smnp.token.tokenizers.percent import tokenizePercent
|
||||
from smnp.token.tokenizers.ret import tokenizeReturn
|
||||
from smnp.token.tokenizers.square import tokenizeOpenSquare, tokenizeCloseSquare
|
||||
from smnp.token.tokenizers.string import tokenizeString
|
||||
from smnp.token.tokenizers.type import tokenizeType
|
||||
from smnp.token.tokenizers.whitespace import tokenizeWhitespaces
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
@@ -32,6 +30,9 @@ tokenizers = (
|
||||
tokenizeFunction,
|
||||
tokenizeReturn,
|
||||
tokenizeExtend,
|
||||
tokenizeImport,
|
||||
tokenizeFrom,
|
||||
tokenizeAs,
|
||||
tokenizeInteger,
|
||||
tokenizeNote,
|
||||
tokenizeIdentifier,
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
|
||||
def tokenizeExtend(input, current, line):
    """Try to tokenize the 'extend' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the EXTEND token type.
    result = tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
    return result
|
||||
@@ -1,5 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
def tokenizeFunction(input, current, line):
    """Try to tokenize the 'function' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the FUNCTION token type.
    result = tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
    return result
|
||||
34
smnp/token/tokenizers/keyword.py
Normal file
34
smnp/token/tokenizers/keyword.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from smnp.token.tools import tokenizeKeywords, tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
from smnp.type.model import Type
|
||||
|
||||
|
||||
def tokenizeType(input, current, line):
    """Try to tokenize a type-name keyword (a lowercased ``Type`` member name)."""
    # Build the candidate keyword list from the Type enum, then delegate
    # to the multi-keyword tokenizer with the TYPE token type.
    candidates = []
    for member in Type:
        candidates.append(member.name.lower())
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *candidates)
|
||||
|
||||
|
||||
def tokenizeReturn(input, current, line):
    """Try to tokenize the 'return' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the RETURN token type.
    result = tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
    return result
|
||||
|
||||
|
||||
def tokenizeFunction(input, current, line):
    """Try to tokenize the 'function' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the FUNCTION token type.
    result = tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
    return result
|
||||
|
||||
|
||||
def tokenizeExtend(input, current, line):
    """Try to tokenize the 'extend' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the EXTEND token type.
    result = tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
    return result
|
||||
|
||||
|
||||
def tokenizeImport(input, current, line):
    """Try to tokenize the 'import' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the IMPORT token type.
    result = tokenizeKeyword(TokenType.IMPORT, "import", input, current, line)
    return result
|
||||
|
||||
|
||||
def tokenizeFrom(input, current, line):
    """Try to tokenize the 'from' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the FROM token type.
    result = tokenizeKeyword(TokenType.FROM, "from", input, current, line)
    return result
|
||||
|
||||
|
||||
def tokenizeAs(input, current, line):
    """Try to tokenize the 'as' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the AS token type.
    result = tokenizeKeyword(TokenType.AS, "as", input, current, line)
    return result
|
||||
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
def tokenizeReturn(input, current, line):
    """Try to tokenize the 'return' keyword at position *current* of *input*."""
    # Delegate to the shared keyword tokenizer with the RETURN token type.
    result = tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
    return result
|
||||
@@ -1,8 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeywords
|
||||
from smnp.token.type import TokenType
|
||||
from smnp.type.model import Type
|
||||
|
||||
|
||||
def tokenizeType(input, current, line):
    """Try to tokenize a type-name keyword (a lowercased ``Type`` member name)."""
    # Build the candidate keyword list from the Type enum, then delegate
    # to the multi-keyword tokenizer with the TYPE token type.
    candidates = []
    for member in Type:
        candidates.append(member.name.lower())
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *candidates)
|
||||
@@ -23,3 +23,6 @@ class TokenType(Enum):
|
||||
CLOSE_SQUARE = auto()
|
||||
TYPE = auto()
|
||||
EXTEND = auto()
|
||||
IMPORT = auto()
|
||||
FROM = auto()
|
||||
AS = auto()
|
||||
Reference in New Issue
Block a user