Add new token: TokenType.BOOL
This commit is contained in:
@@ -1,5 +1,6 @@
|
||||
from smnp.error.syntax import SyntaxException
|
||||
from smnp.token.model import TokenList
|
||||
from smnp.token.tokenizers.bool import boolTokenizer
|
||||
from smnp.token.tokenizers.comment import commentTokenizer
|
||||
from smnp.token.tokenizers.identifier import identifierTokenizer
|
||||
from smnp.token.tokenizers.keyword import typeTokenizer
|
||||
@@ -31,8 +32,9 @@ tokenizers = (
|
||||
# Types
|
||||
separated(regexPatternTokenizer(TokenType.INTEGER, r'\d')),
|
||||
stringTokenizer,
|
||||
typeTokenizer,
|
||||
noteTokenizer,
|
||||
boolTokenizer,
|
||||
typeTokenizer,
|
||||
|
||||
# Keywords
|
||||
separated(defaultTokenizer(TokenType.FUNCTION)),
|
||||
@@ -50,7 +52,6 @@ tokenizers = (
|
||||
commentTokenizer,
|
||||
)
|
||||
|
||||
|
||||
filters = [
|
||||
lambda token: token.type is not None,
|
||||
lambda token: token.type != TokenType.COMMENT
|
||||
@@ -58,33 +59,33 @@ filters = [
|
||||
|
||||
|
||||
def tokenize(lines):
    """Tokenize every source line and return the filtered TokenList.

    Walks each line character-position by character-position, delegating to
    combinedTokenizer; raises SyntaxException at the first position no
    tokenizer can consume.
    """
    collected = []
    for lineNumber, line in enumerate(lines):
        position = 0
        while position < len(line):
            consumed, token = combinedTokenizer(line, position, lineNumber)

            # No tokenizer recognized the symbol at this position.
            if consumed == 0:
                raise SyntaxException(f"Unknown symbol '{line[position]}'", (lineNumber, position))

            position += consumed
            collected.append(token)

    return TokenList(filterTokens(filters, collected), lines)
|
||||
|
||||
|
||||
def combinedTokenizer(line, current, lineNumber):
    """Try each registered tokenizer in order and return the first hit.

    Returns a (consumedChars, token) pair; (0, None) signals that no
    tokenizer matched at this position.
    """
    # Lazy generator: tokenizers after the first match are never invoked,
    # preserving the original short-circuit order.
    attempts = (tok(line, current, lineNumber) for tok in tokenizers)
    return next((result for result in attempts if result[0] > 0), (0, None))
|
||||
|
||||
|
||||
def filterTokens(filters, tokens):
    """Recursively apply each predicate in *filters* to *tokens*.

    With no filters the tokens are returned unchanged; otherwise the first
    predicate prunes the stream lazily and the remaining predicates are
    applied to the survivors, with the final result materialized as a list.
    """
    if not filters:
        return tokens
    head, *rest = filters
    survivors = (token for token in tokens if head(token))
    return list(filterTokens(rest, survivors))
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user