Add 'import' statements
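Reading the two parser combinators in ImportNode below (IMPORT type FROM string AS identifier, and bare IMPORT string), the new syntax should accept roughly the following two forms. This rendering is inferred from the token sequence only; the type name, path, and quoting are invented for illustration:

    import integer from "stdlib/lists" as lists
    import "stdlib/lists"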
smnp/newast/node/imports.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+from smnp.newast.node.identifier import IdentifierNode
+from smnp.newast.node.model import Node
+from smnp.newast.node.none import NoneNode
+from smnp.newast.node.string import StringLiteralNode
+from smnp.newast.node.type import TypeNode
+from smnp.newast.parser import Parser
+from smnp.token.type import TokenType
+
+
+class ImportNode(Node):
+    def __init__(self, pos):
+        super().__init__(pos)
+        self.children = [NoneNode(), NoneNode(), NoneNode()]
+
+    @property
+    def source(self):
+        return self[0]
+
+    @source.setter
+    def source(self, value):
+        self[0] = value
+
+    @property
+    def type(self):
+        return self[1]
+
+    @type.setter
+    def type(self, value):
+        self[1] = value
+
+    @property
+    def variable(self):
+        return self[2]
+
+    @variable.setter
+    def variable(self, value):
+        self[2] = value
+
+    @classmethod
+    def _parse(cls, input):
+        return Parser.oneOf(
+            cls._literalImportParser(),
+            cls._fileImportParser()
+        )(input)
+
+    @classmethod
+    def _literalImportParser(cls):
+        def createNode(importKeyword, type, fromKeyword, source, asKeyword, variable):
+            node = ImportNode(importKeyword.pos)
+            node.source = source
+            node.type = type
+            node.variable = variable
+            return node
+
+        return Parser.allOf(
+            Parser.terminalParser(TokenType.IMPORT),
+            TypeNode.parse,
+            Parser.terminalParser(TokenType.FROM),
+            StringLiteralNode._literalParser(),
+            Parser.terminalParser(TokenType.AS),
+            IdentifierNode.identifierParser(),
+            createNode=createNode
+        )
+
+    @classmethod
+    def _fileImportParser(cls):
+        def createNode(importKeyword, source):
+            node = ImportNode(importKeyword.pos)
+            node.source = source
+            return node
+
+        return Parser.allOf(
+            Parser.terminalParser(TokenType.IMPORT),
+            StringLiteralNode._literalParser(),
+            createNode=createNode
+        )
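Both import parsers are assembled from Parser.oneOf (ordered choice) and Parser.allOf (sequencing into a createNode callback). The actual Parser class is not part of this diff, so the following is only a minimal sketch of how combinators with these signatures typically behave; ParseResult's shape and all field names here are assumptions:

    from collections import namedtuple

    # Illustrative stand-in for smnp's real ParseResult; the shape is assumed.
    ParseResult = namedtuple("ParseResult", ["ok", "node", "rest"])

    def oneOf(*parsers):
        # Ordered choice: try each alternative on the same tokens, first success wins.
        def parse(tokens):
            for parser in parsers:
                result = parser(tokens)
                if result.ok:
                    return result
            return ParseResult(False, None, tokens)
        return parse

    def allOf(*parsers, createNode):
        # Sequence: every parser must succeed in order; the collected results
        # are folded into one AST node via createNode, as ImportNode does above.
        def parse(tokens):
            values, rest = [], tokens
            for parser in parsers:
                result = parser(rest)
                if not result.ok:
                    return ParseResult(False, None, tokens)  # fail as a whole, consume nothing
                values.append(result.node)
                rest = result.rest
            return ParseResult(True, createNode(*values), rest)
        return parse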
@@ -2,8 +2,10 @@ from smnp.error.syntax import SyntaxException
 from smnp.newast.node.expression import ExpressionNode
 from smnp.newast.node.extend import ExtendNode
 from smnp.newast.node.function import FunctionDefinitionNode
+from smnp.newast.node.imports import ImportNode
 from smnp.newast.node.model import Node, ParseResult
 from smnp.newast.node.statement import StatementNode
 
 from smnp.newast.parser import Parser
 
+
@@ -18,6 +20,7 @@ class Program(Node):
             FunctionDefinitionNode.parse,
             ExtendNode.parse,
             ExpressionNode.parse,
+            ImportNode.parse,
             StatementNode.parse,
             exception = SyntaxException(f"Unknown statement: {input.current().pos}")
         )(input)
@@ -6,18 +6,16 @@ from smnp.token.tokenizers.bracket import tokenizeOpenBracket, tokenizeCloseBrac
 from smnp.token.tokenizers.comma import tokenizeComma
 from smnp.token.tokenizers.comment import tokenizeComment
 from smnp.token.tokenizers.dot import tokenizeDot
-from smnp.token.tokenizers.extend import tokenizeExtend
-from smnp.token.tokenizers.function import tokenizeFunction
 from smnp.token.tokenizers.identifier import tokenizeIdentifier
 from smnp.token.tokenizers.integer import tokenizeInteger
+from smnp.token.tokenizers.keyword import tokenizeType, tokenizeFunction, tokenizeReturn, tokenizeExtend, \
+    tokenizeImport, tokenizeFrom, tokenizeAs
 from smnp.token.tokenizers.minus import tokenizeMinus
 from smnp.token.tokenizers.note import tokenizeNote
 from smnp.token.tokenizers.paren import tokenizeOpenParen, tokenizeCloseParen
 from smnp.token.tokenizers.percent import tokenizePercent
-from smnp.token.tokenizers.ret import tokenizeReturn
 from smnp.token.tokenizers.square import tokenizeOpenSquare, tokenizeCloseSquare
 from smnp.token.tokenizers.string import tokenizeString
-from smnp.token.tokenizers.type import tokenizeType
 from smnp.token.tokenizers.whitespace import tokenizeWhitespaces
 from smnp.token.type import TokenType
 
@@ -32,6 +30,9 @@ tokenizers = (
     tokenizeFunction,
     tokenizeReturn,
     tokenizeExtend,
+    tokenizeImport,
+    tokenizeFrom,
+    tokenizeAs,
     tokenizeInteger,
     tokenizeNote,
     tokenizeIdentifier,
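Note that tokenizeImport, tokenizeFrom, and tokenizeAs are registered ahead of tokenizeIdentifier. Assuming the tuple is tried in order, this placement is what keeps the words import, from, and as from being tokenized as plain identifiers.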
smnp/token/tokenizers/extend.py (deleted file)
@@ -1,6 +0,0 @@
-from smnp.token.tools import tokenizeKeyword
-from smnp.token.type import TokenType
-
-
-def tokenizeExtend(input, current, line):
-    return tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
smnp/token/tokenizers/function.py (deleted file)
@@ -1,5 +0,0 @@
-from smnp.token.tools import tokenizeKeyword
-from smnp.token.type import TokenType
-
-def tokenizeFunction(input, current, line):
-    return tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
smnp/token/tokenizers/keyword.py (new file, 34 lines)
@@ -0,0 +1,34 @@
+from smnp.token.tools import tokenizeKeywords, tokenizeKeyword
+from smnp.token.type import TokenType
+from smnp.type.model import Type
+
+
+def tokenizeType(input, current, line):
+    types = [ type.name.lower() for type in Type ]
+    return tokenizeKeywords(TokenType.TYPE, input, current, line, *types)
+
+
+def tokenizeReturn(input, current, line):
+    return tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
+
+
+def tokenizeFunction(input, current, line):
+    return tokenizeKeyword(TokenType.FUNCTION, 'function', input, current, line)
+
+
+def tokenizeExtend(input, current, line):
+    return tokenizeKeyword(TokenType.EXTEND, "extend", input, current, line)
+
+
+def tokenizeImport(input, current, line):
+    return tokenizeKeyword(TokenType.IMPORT, "import", input, current, line)
+
+
+def tokenizeFrom(input, current, line):
+    return tokenizeKeyword(TokenType.FROM, "from", input, current, line)
+
+
+def tokenizeAs(input, current, line):
+    return tokenizeKeyword(TokenType.AS, "as", input, current, line)
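smnp.token.tools itself is not touched by this diff, so the contract behind tokenizeKeyword/tokenizeKeywords can only be guessed from the call sites: match a fixed word at the current offset and report how far the cursor advanced. The sketch below is an assumption, including the return convention and the Token shape:

    from collections import namedtuple

    Token = namedtuple("Token", ["type", "value", "pos"])  # assumed token shape

    def tokenizeKeyword(tokenType, keyword, input, current, line):
        # Match `keyword` as a whole word starting at input[current];
        # `input` shadows the builtin only to mirror the project's signatures.
        end = current + len(keyword)
        boundary = end >= len(input) or not (input[end].isalnum() or input[end] == "_")
        if input[current:end] == keyword and boundary:
            return end, Token(tokenType, keyword, (line, current))
        return current, None  # no match: cursor unchanged, no token produced

    def tokenizeKeywords(tokenType, input, current, line, *keywords):
        # First matching spelling wins (used for TYPE, one spelling per Type member).
        for keyword in keywords:
            consumed, token = tokenizeKeyword(tokenType, keyword, input, current, line)
            if token is not None:
                return consumed, token
        return current, None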
smnp/token/tokenizers/ret.py (deleted file)
@@ -1,5 +0,0 @@
-from smnp.token.tools import tokenizeKeyword
-from smnp.token.type import TokenType
-
-def tokenizeReturn(input, current, line):
-    return tokenizeKeyword(TokenType.RETURN, 'return', input, current, line)
smnp/token/tokenizers/type.py (deleted file)
@@ -1,8 +0,0 @@
-from smnp.token.tools import tokenizeKeywords
-from smnp.token.type import TokenType
-from smnp.type.model import Type
-
-
-def tokenizeType(input, current, line):
-    types = [ type.name.lower() for type in Type ]
-    return tokenizeKeywords(TokenType.TYPE, input, current, line, *types)
smnp/token/type.py
@@ -23,3 +23,6 @@ class TokenType(Enum):
     CLOSE_SQUARE = auto()
     TYPE = auto()
     EXTEND = auto()
+    IMPORT = auto()
+    FROM = auto()
+    AS = auto()