Add 'import' statements

This commit is contained in:
Bartłomiej Pluta
2019-07-06 12:10:58 +02:00
parent bf40c5130c
commit 7b4f4fa8fb
9 changed files with 121 additions and 28 deletions

View File

@@ -1,6 +0,0 @@
from smnp.token.tools import tokenizeKeyword
from smnp.token.type import TokenType
def tokenizeExtend(input, current, line):
    """Emit an EXTEND token if the 'extend' keyword starts at `current`."""
    # Delegate to the shared keyword tokenizer with this function's token type.
    matched = tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
    return matched

View File

@@ -1,5 +0,0 @@
from smnp.token.tools import tokenizeKeyword
from smnp.token.type import TokenType
def tokenizeFunction(input, current, line):
    """Emit a FUNCTION token if the 'function' keyword starts at `current`."""
    # Thin wrapper around the generic keyword tokenizer.
    matched = tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
    return matched

View File

@@ -0,0 +1,34 @@
from smnp.token.tools import tokenizeKeywords, tokenizeKeyword
from smnp.token.type import TokenType
from smnp.type.model import Type
def tokenizeType(input, current, line):
    """Emit a TYPE token if a type-name keyword starts at `current`.

    The recognized keywords are the lower-cased names of the `Type` enum
    members, so newly added types are picked up automatically.
    """
    # Use `t`, not `type`, as the loop variable to avoid shadowing the builtin.
    type_names = [t.name.lower() for t in Type]
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *type_names)
def tokenizeReturn(input, current, line):
    """Emit a RETURN token if the 'return' keyword starts at `current`."""
    # Delegate the actual matching to the shared keyword tokenizer.
    matched = tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
    return matched
def tokenizeFunction(input, current, line):
    """Emit a FUNCTION token if the 'function' keyword starts at `current`."""
    # Thin wrapper around the generic keyword tokenizer.
    matched = tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
    return matched
def tokenizeExtend(input, current, line):
    """Emit an EXTEND token if the 'extend' keyword starts at `current`."""
    # Delegate to the shared keyword tokenizer with this function's token type.
    matched = tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
    return matched
def tokenizeImport(input, current, line):
    """Emit an IMPORT token if the 'import' keyword starts at `current`."""
    # Thin wrapper around the generic keyword tokenizer.
    matched = tokenizeKeyword(TokenType.IMPORT, "import", input, current, line)
    return matched
def tokenizeFrom(input, current, line):
    """Emit a FROM token if the 'from' keyword starts at `current`."""
    # Thin wrapper around the generic keyword tokenizer.
    matched = tokenizeKeyword(TokenType.FROM, "from", input, current, line)
    return matched
def tokenizeAs(input, current, line):
    """Emit an AS token if the 'as' keyword starts at `current`."""
    # Thin wrapper around the generic keyword tokenizer.
    matched = tokenizeKeyword(TokenType.AS, "as", input, current, line)
    return matched

View File

@@ -1,5 +0,0 @@
from smnp.token.tools import tokenizeKeyword
from smnp.token.type import TokenType
def tokenizeReturn(input, current, line):
    """Emit a RETURN token if the 'return' keyword starts at `current`."""
    # Delegate the actual matching to the shared keyword tokenizer.
    matched = tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
    return matched

View File

@@ -1,8 +0,0 @@
from smnp.token.tools import tokenizeKeywords
from smnp.token.type import TokenType
from smnp.type.model import Type
def tokenizeType(input, current, line):
    """Emit a TYPE token if a type-name keyword starts at `current`.

    The recognized keywords are the lower-cased names of the `Type` enum
    members, so newly added types are picked up automatically.
    """
    # Use `t`, not `type`, as the loop variable to avoid shadowing the builtin.
    type_names = [t.name.lower() for t in Type]
    return tokenizeKeywords(TokenType.TYPE, input, current, line, *type_names)