Reformat evaluator #1

Author: Bartłomiej Pluta
Date:   2019-07-04 17:57:12 +02:00
Parent: f0cbf37fe9
Commit: 34a0eda199
36 changed files with 470 additions and 270 deletions

@@ -1,27 +1,24 @@
-import sys
-import time
-import re
-from smnp.error.syntax import SyntaxException
-from smnp.token.type import TokenType
-from smnp.token.model import Token, TokenList
-from smnp.token.tools import tokenizeChar, tokenizeRegexPattern
-from smnp.token.tokenizers.paren import tokenizeOpenParen, tokenizeCloseParen
-from smnp.token.tokenizers.asterisk import tokenizeAsterisk
-from smnp.token.tokenizers.whitespace import tokenizeWhitespaces
-from smnp.token.tokenizers.identifier import tokenizeIdentifier
-from smnp.token.tokenizers.comma import tokenizeComma
-from smnp.token.tokenizers.string import tokenizeString
-from smnp.token.tokenizers.integer import tokenizeInteger
-from smnp.token.tokenizers.bracket import tokenizeOpenBracket, tokenizeCloseBracket
+from smnp.error.syntax import SyntaxException
+from smnp.token.model import TokenList
 from smnp.token.tokenizers.assign import tokenizeAssign
+from smnp.token.tokenizers.asterisk import tokenizeAsterisk
+from smnp.token.tokenizers.bracket import tokenizeOpenBracket, tokenizeCloseBracket
 from smnp.token.tokenizers.colon import tokenizeColon
+from smnp.token.tokenizers.comma import tokenizeComma
 from smnp.token.tokenizers.comment import tokenizeComment
-from smnp.token.tokenizers.note import tokenizeNote
-from smnp.token.tokenizers.function import tokenizeFunction
-from smnp.token.tokenizers.ret import tokenizeReturn
-from smnp.token.tokenizers.percent import tokenizePercent
-from smnp.token.tokenizers.minus import tokenizeMinus
 from smnp.token.tokenizers.dot import tokenizeDot
+from smnp.token.tokenizers.function import tokenizeFunction
+from smnp.token.tokenizers.identifier import tokenizeIdentifier
+from smnp.token.tokenizers.integer import tokenizeInteger
+from smnp.token.tokenizers.minus import tokenizeMinus
+from smnp.token.tokenizers.note import tokenizeNote
+from smnp.token.tokenizers.paren import tokenizeOpenParen, tokenizeCloseParen
+from smnp.token.tokenizers.percent import tokenizePercent
+from smnp.token.tokenizers.ret import tokenizeReturn
+from smnp.token.tokenizers.string import tokenizeString
+from smnp.token.tokenizers.whitespace import tokenizeWhitespaces
+from smnp.token.type import TokenType

 tokenizers = (
     tokenizeOpenParen,
@@ -58,7 +55,7 @@ def tokenize(lines):
         consumedChars, token = combinedTokenizer(line, current, lineNumber)
         if consumedChars == 0:
-            raise SyntaxException((lineNumber, current), f"Unknown symbol '{line[current]}'")
+            raise SyntaxException(f"Unknown symbol '{line[current]}'", (lineNumber, current))
         current += consumedChars
         tokens.append(token)
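
For readers skimming the second hunk: the change swaps the SyntaxException argument order so the message comes first and the (line, column) position second. Below is a minimal, self-contained sketch of a tokenize loop matching the call pattern visible in the diff. The SyntaxException class here is only a stand-in for smnp.error.syntax.SyntaxException, and tokenizeDigit/combine are hypothetical helpers for illustration, not the project's real tokenizers.

# Illustrative sketch, not the project's actual implementation.

class SyntaxException(Exception):
    """Stand-in for smnp.error.syntax.SyntaxException (assumed shape)."""
    def __init__(self, message, position):
        super().__init__(f"{message} at line {position[0]}, column {position[1]}")


def tokenizeDigit(line, current, lineNumber):
    # Toy tokenizer: consume a single digit, otherwise consume nothing.
    if line[current].isdigit():
        return 1, ("INTEGER", line[current], (lineNumber, current))
    return 0, None


def combine(*tokenizers):
    # Try each tokenizer in turn until one consumes at least one character.
    def combined(line, current, lineNumber):
        for tokenizer in tokenizers:
            consumedChars, token = tokenizer(line, current, lineNumber)
            if consumedChars > 0:
                return consumedChars, token
        return 0, None
    return combined


def tokenize(lines, combinedTokenizer=combine(tokenizeDigit)):
    tokens = []
    for lineNumber, line in enumerate(lines):
        current = 0
        while current < len(line):
            consumedChars, token = combinedTokenizer(line, current, lineNumber)
            if consumedChars == 0:
                # New argument order from the hunk: message first, position second.
                raise SyntaxException(f"Unknown symbol '{line[current]}'", (lineNumber, current))
            current += consumedChars
            tokens.append(token)
    return tokens


print(tokenize(["12", "3"]))   # three INTEGER tokens
# tokenize(["1a"])             # would raise: Unknown symbol 'a' at line 0, column 1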