Improve lists

Bartłomiej Pluta
2019-07-05 18:17:00 +02:00
parent acd9a42e1b
commit ad19e851ec
6 changed files with 48 additions and 46 deletions

View File

@@ -1,4 +1,5 @@
from smnp.newast.node.expression import ExpressionNode
+from smnp.newast.node.ignore import IgnoredNode
from smnp.newast.node.model import Node, ParseResult
from smnp.newast.node.none import NoneNode
from smnp.newast.parser import Parser
@@ -26,11 +27,11 @@ def abstractIterableParser(iterableNodeType, openTokenType, closeTokenType, item
                AbstractIterableTailNode._parser1(),
                AbstractIterableTailNode._parser2(),
            )(input)

        @staticmethod
        def _parser1():
            return Parser.terminalParser(closeTokenType)

        @staticmethod
        def _parser2():
            def createNode(comma, expr, iterableTail):
@@ -46,7 +47,6 @@ def abstractIterableParser(iterableNodeType, openTokenType, closeTokenType, item
                createNode=createNode
            )

    class AbstractIterableNode(ExpressionNode):
        def __init__(self, pos):
            super().__init__(pos)
@@ -67,7 +67,7 @@ def abstractIterableParser(iterableNodeType, openTokenType, closeTokenType, item
                AbstractIterableNode._parser1(),
                AbstractIterableNode._parser2()
            )(input)

        @staticmethod
        def _parser1():
            def emptyIterable(openToken, closeToken):
@@ -82,7 +82,6 @@ def abstractIterableParser(iterableNodeType, openTokenType, closeTokenType, item
                createNode=emptyIterable
            )

        @staticmethod
        def _parser2():
            def createNode(openParen, expr, iterableTail):
@@ -98,17 +97,34 @@ def abstractIterableParser(iterableNodeType, openTokenType, closeTokenType, item
                createNode=createNode
            )

-    def toDesiredType(parser):
-        def parse(input):
-            result = parser(input)
-
-            if result.result:
-                node = iterableNodeType(result.node.pos)
-                node.children.clear()
-                node.children.extend([ result.node.value, result.node.next ])
-                return ParseResult.OK(node)
-
-            return ParseResult.FAIL()
-
-        return parse
-
-    return toDesiredType(AbstractIterableNode.parse)
+    return toFlatDesiredNode(iterableNodeType, AbstractIterableNode.parse)
+
+
+def toFlatDesiredNode(iterableNodeType, parser):
+    def parse(input):
+        result = parser(input)
+
+        if result.result:
+            value = flattenList(result.node)
+            node = iterableNodeType(result.node.pos)
+            node.children.clear()
+            for v in value:
+                node.append(v)
+            return ParseResult.OK(node)
+
+        return ParseResult.FAIL()
+
+    return parse
+
+
+def flattenList(node, output=None):
+    if output is None:
+        output = []
+
+    if type(node.value) != IgnoredNode:
+        output.append(node.value)
+
+    if type(node.next) != IgnoredNode:
+        flattenList(node.next, output)
+
+    return output
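
The old toDesiredType copied the recursive value/next pair straight into the resulting node, so a parsed list kept one level of nesting per element. The new toFlatDesiredNode runs flattenList over that chain first, so the iterable node ends up with one child per item. A minimal standalone sketch of the flattening step, using simplified stand-in classes rather than the real smnp node types:

# Stand-ins for illustration only; the real node classes live in smnp.newast.node.
class IgnoredNode:
    pass

class TailNode:
    def __init__(self, value, next):
        self.value = value
        self.next = next

def flattenList(node, output=None):
    if output is None:
        output = []
    if type(node.value) != IgnoredNode:
        output.append(node.value)
    if type(node.next) != IgnoredNode:
        flattenList(node.next, output)
    return output

# A parsed list such as [1, 2, 3] roughly corresponds to a nested tail chain:
chain = TailNode(1, TailNode(2, TailNode(3, IgnoredNode())))
print(flattenList(chain))  # [1, 2, 3]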

View File

@@ -7,4 +7,4 @@ class ListNode(ExpressionNode):
    @classmethod
    def _parse(cls, input):
-        return abstractIterableParser(ListNode, TokenType.OPEN_PAREN, TokenType.CLOSE_PAREN, ExpressionNode.parse)(input)
+        return abstractIterableParser(ListNode, TokenType.OPEN_SQUARE, TokenType.CLOSE_SQUARE, ExpressionNode.parse)(input)

View File

@@ -15,30 +15,4 @@ def _flatListNode(listItemNode, list = []):
# NEW AST
# def toFlatList(parser):
#     def decoratedParser(input):
#         result = parser(input)
#
#         if result.result:
#             value = flattenList(result.node)
#             node = iterableNodeType()
#             for v in value:
#                 node.append(v)
#             return ParseResult.OK(node)
#
#         return ParseResult.FAIL()
#
#     return decoratedParser
#
#
# def flattenList(node, output=None):
#     if output is None:
#         output = []
#
#     if type(node.value) != IgnoredNode:
#         output.append(node.value)
#
#     if type(node.next) != IgnoredNode:
#         flattenList(node.next, output)
#
#     return output
#
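
For reference, the old helper in this file, _flatListNode(listItemNode, list = []), relied on a mutable default argument, which Python evaluates only once per function definition; the new flattenList shown earlier sidesteps this by defaulting to None. A quick standalone demonstration of that pitfall:

# The shared default list keeps accumulating across calls.
def collect(item, acc=[]):
    acc.append(item)
    return acc

print(collect(1))  # [1]
print(collect(2))  # [1, 2]  <- still holds the value from the first call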

View File

@@ -1,5 +1,4 @@
-from smnp.error.syntax import SyntaxException
from smnp.error.syntax import SyntaxException
from smnp.token.model import TokenList
from smnp.token.tokenizers.assign import tokenizeAssign
from smnp.token.tokenizers.asterisk import tokenizeAsterisk
@@ -16,13 +15,16 @@ from smnp.token.tokenizers.note import tokenizeNote
from smnp.token.tokenizers.paren import tokenizeOpenParen, tokenizeCloseParen
from smnp.token.tokenizers.percent import tokenizePercent
from smnp.token.tokenizers.ret import tokenizeReturn
+from smnp.token.tokenizers.square import tokenizeOpenSquare, tokenizeCloseSquare
from smnp.token.tokenizers.string import tokenizeString
from smnp.token.tokenizers.whitespace import tokenizeWhitespaces
from smnp.token.type import TokenType

tokenizers = (
    tokenizeOpenParen,
-    tokenizeCloseParen,
+    tokenizeCloseParen,
+    tokenizeOpenSquare,
+    tokenizeCloseSquare,
    tokenizeAsterisk,
    tokenizeString,
    tokenizeFunction,
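
The tokenizers tuple suggests a first-match-wins dispatch: each tokenizer is tried in order at the current position, and the first one that recognises the input produces a token. The actual driver loop is not part of this diff, so the following is only a guessed, self-contained sketch of that pattern with made-up helpers:

# Hypothetical sketch; names and return shapes are assumptions, not smnp's API.
def char_tokenizer(token_type, char):
    def tokenizer(text, pos):
        if text[pos] == char:
            return token_type, char, pos + 1  # (type, value, next position)
        return None
    return tokenizer

tokenizers = (
    char_tokenizer("OPEN_SQUARE", "["),
    char_tokenizer("CLOSE_SQUARE", "]"),
    char_tokenizer("COMMA", ","),
)

def tokenize(text):
    tokens, pos = [], 0
    while pos < len(text):
        for tokenizer in tokenizers:
            result = tokenizer(text, pos)
            if result is not None:
                token_type, value, pos = result
                tokens.append((token_type, value))
                break
        else:
            raise SyntaxError(f"Unexpected character {text[pos]!r} at position {pos}")
    return tokens

print(tokenize("[],"))  # [('OPEN_SQUARE', '['), ('CLOSE_SQUARE', ']'), ('COMMA', ',')]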

View File

@@ -0,0 +1,8 @@
+from smnp.token.tools import tokenizeChar
+from smnp.token.type import TokenType
+
+def tokenizeOpenSquare(input, current, line):
+    return tokenizeChar(TokenType.OPEN_SQUARE, '[', input, current, line)
+
+def tokenizeCloseSquare(input, current, line):
+    return tokenizeChar(TokenType.CLOSE_SQUARE, ']', input, current, line)
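
Both functions above delegate to tokenizeChar from smnp.token.tools, which is not shown in this commit. Presumably it matches a single character at the current position and reports a token for the given line; a rough stand-in, with the return shape purely assumed for illustration:

# Assumed behaviour of a single-character tokenizer; not the real smnp.token.tools code.
def tokenizeChar(tokenType, char, input, current, line):
    if current < len(input) and input[current] == char:
        return 1, (tokenType, char, line)  # (characters consumed, token)
    return 0, None

print(tokenizeChar("OPEN_SQUARE", '[', "[1, 2]", 0, 1))  # (1, ('OPEN_SQUARE', '[', 1))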

View File

@@ -19,3 +19,5 @@ class TokenType(Enum):
    FUNCTION = 16
    RETURN = 17
    DOT = 18
+    OPEN_SQUARE = 19
+    CLOSE_SQUARE = 20