Add 'import' statements
This commit is contained in:
@@ -1,6 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
|
||||
def tokenizeExtend(input, current, line):
    """Tokenize the 'extend' keyword at *current*, delegating to the shared keyword matcher."""
    return tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
|
||||
@@ -1,5 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
def tokenizeFunction(input, current, line):
    """Tokenize the 'function' keyword at *current* via the shared keyword matcher."""
    return tokenizeKeyword(TokenType.FUNCTION, "function", input, current, line)
|
||||
34
smnp/token/tokenizers/keyword.py
Normal file
34
smnp/token/tokenizers/keyword.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from smnp.token.tools import tokenizeKeywords, tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
from smnp.type.model import Type
|
||||
|
||||
|
||||
def tokenizeType(input, current, line):
    """Tokenize any built-in type name (lower-cased Type enum members) as a TYPE token."""
    typeNames = [member.name.lower() for member in Type]
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *typeNames)
|
||||
|
||||
|
||||
def tokenizeReturn(input, current, line):
    """Tokenize the 'return' keyword at *current* as a RETURN token."""
    return tokenizeKeyword(TokenType.RETURN, "return", input, current, line)
|
||||
|
||||
|
||||
def tokenizeFunction(input, current, line):
    """Tokenize the 'function' keyword at *current* as a FUNCTION token."""
    return tokenizeKeyword(TokenType.FUNCTION, "function", input, current, line)
|
||||
|
||||
|
||||
def tokenizeExtend(input, current, line):
    """Tokenize the 'extend' keyword at *current* as an EXTEND token."""
    return tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
|
||||
|
||||
|
||||
def tokenizeImport(input, current, line):
    """Tokenize the 'import' keyword at *current* as an IMPORT token."""
    return tokenizeKeyword(TokenType.IMPORT, "import", input, current, line)
|
||||
|
||||
|
||||
def tokenizeFrom(input, current, line):
    """Tokenize the 'from' keyword at *current* as a FROM token."""
    return tokenizeKeyword(TokenType.FROM, "from", input, current, line)
|
||||
|
||||
|
||||
def tokenizeAs(input, current, line):
    """Tokenize the 'as' keyword at *current* as an AS token."""
    return tokenizeKeyword(TokenType.AS, "as", input, current, line)
|
||||
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeyword
|
||||
from smnp.token.type import TokenType
|
||||
|
||||
def tokenizeReturn(input, current, line):
    """Tokenize the 'return' keyword at *current*, delegating to the shared keyword matcher."""
    return tokenizeKeyword(TokenType.RETURN, "return", input, current, line)
|
||||
@@ -1,8 +0,0 @@
|
||||
from smnp.token.tools import tokenizeKeywords
|
||||
from smnp.token.type import TokenType
|
||||
from smnp.type.model import Type
|
||||
|
||||
|
||||
def tokenizeType(input, current, line):
    """Tokenize any built-in type keyword (every Type enum member, lower-cased) as a TYPE token."""
    keywordList = [entry.name.lower() for entry in Type]
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *keywordList)
|
||||
Reference in New Issue
Block a user