Improve float type tokenizer

This commit is contained in:
Bartłomiej Pluta
2019-07-25 13:02:33 +02:00
parent 0657214aa3
commit 6222dccaac
2 changed files with 27 additions and 19 deletions

View File

@@ -1,25 +1,17 @@

Before (indentation reconstructed from the fall-through to `return (0, None)`):

    from smnp.token.model import Token
    from smnp.token.tools import regexPatternTokenizer, keywordTokenizer
    from smnp.token.type import TokenType


    def floatTokenizer(input, current, line):
        consumedChars = 0
        value = ""
        consumed, token = regexPatternTokenizer(TokenType.INTEGER, r'\d')(input, current, line)
        if consumed > 0:
            consumedChars += consumed
            value += token.value
            consumed, token = keywordTokenizer(TokenType.DOT, ".")(input, current+consumedChars, line)
            if consumed > 0:
                consumedChars += consumed
                value += token.value
                consumed, token = regexPatternTokenizer(TokenType.INTEGER, r'\d')(input, current+consumedChars, line)
                if consumed > 0:
                    consumedChars += consumed
                    value += token.value
                    print(value)
                    return (consumedChars, Token(TokenType.FLOAT, float(value), (current, line), value))
        return (0, None)

After:

    from smnp.token.model import Token
    from smnp.token.tools import regexPatternTokenizer, keywordTokenizer, allOf
    from smnp.token.type import TokenType


    def createToken(pos, beforeDot, dot, afterDot):
        rawValue = f"{beforeDot.value}.{afterDot.value}"
        value = float(rawValue)
        return Token(TokenType.FLOAT, value, pos, rawValue)


    floatTokenizer = allOf(
        regexPatternTokenizer(TokenType.INTEGER, r'\d'),
        keywordTokenizer(None, "."),
        regexPatternTokenizer(TokenType.INTEGER, r'\d'),
        createToken=createToken
    )

View File

@@ -61,3 +61,19 @@ def mapValue(tokenizer, mapper):
        return (0, None)
    return tokenize
def allOf(*tokenizers, createToken):
    """Build a tokenizer that matches only when every sub-tokenizer matches.

    The sub-tokenizers are applied in order, each one starting where the
    previous one stopped.  When all of them succeed, the collected tokens
    are handed to *createToken* together with the (current, line) start
    position, and the total consumed length is reported.  If any
    sub-tokenizer fails, nothing is consumed and (0, None) is returned.
    """
    def combinedTokenizer(input, current, line):
        collected = []
        offset = 0
        for tokenize in tokenizers:
            length, token = tokenize(input, current + offset, line)
            if length <= 0:
                # One sub-tokenizer failed, so the whole combination fails.
                return (0, None)
            offset += length
            collected.append(token)
        return (offset, createToken((current, line), *collected))
    return combinedTokenizer