diff --git a/smnp/token/tokenizers/float.py b/smnp/token/tokenizers/float.py
index 6f05557..3c3b5f1 100644
--- a/smnp/token/tokenizers/float.py
+++ b/smnp/token/tokenizers/float.py
@@ -1,25 +1,17 @@
 from smnp.token.model import Token
-from smnp.token.tools import regexPatternTokenizer, keywordTokenizer
+from smnp.token.tools import regexPatternTokenizer, keywordTokenizer, allOf
 from smnp.token.type import TokenType
 
 
-def floatTokenizer(input, current, line):
-    consumedChars = 0
-    value = ""
-
-    consumed, token = regexPatternTokenizer(TokenType.INTEGER, r'\d')(input, current, line)
-    if consumed > 0:
-        consumedChars += consumed
-        value += token.value
-        consumed, token = keywordTokenizer(TokenType.DOT, ".")(input, current+consumedChars, line)
-        if consumed > 0:
-            consumedChars += consumed
-            value += token.value
-            consumed, token = regexPatternTokenizer(TokenType.INTEGER, r'\d')(input, current+consumedChars, line)
-            if consumed > 0:
-                consumedChars += consumed
-                value += token.value
-                print(value)
-                return (consumedChars, Token(TokenType.FLOAT, float(value), (current, line), value))
+def createToken(pos, beforeDot, dot, afterDot):
+    rawValue = f"{beforeDot.value}.{afterDot.value}"
+    value = float(rawValue)
+    return Token(TokenType.FLOAT, value, pos, rawValue)
 
-    return (0, None)
\ No newline at end of file
+
+floatTokenizer = allOf(
+    regexPatternTokenizer(TokenType.INTEGER, r'\d'),
+    keywordTokenizer(None, "."),
+    regexPatternTokenizer(TokenType.INTEGER, r'\d'),
+    createToken=createToken
+)
diff --git a/smnp/token/tools.py b/smnp/token/tools.py
index 297a278..dfce11f 100644
--- a/smnp/token/tools.py
+++ b/smnp/token/tools.py
@@ -61,3 +61,19 @@ def mapValue(tokenizer, mapper):
         return (0, None)
 
     return tokenize
+
+def allOf(*tokenizers, createToken):
+    def combinedTokenizer(input, current, line):
+        consumedChars = 0
+        tokens = []
+        for tokenizer in tokenizers:
+            consumed, token = tokenizer(input, current+consumedChars, line)
+            if consumed > 0:
+                consumedChars += consumed
+                tokens.append(token)
+            else:
+                return (0, None)
+
+        return (consumedChars, createToken((current, line), *tokens))
+
+    return combinedTokenizer
\ No newline at end of file
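
Note on the refactor: `allOf` replaces the old `floatTokenizer`'s copy-pasted consume-and-accumulate chains with a single declarative composition, and it also drops the stray `print(value)` debug call. The dot token produced by `keywordTokenizer(None, ".")` is still passed to `createToken` but deliberately ignored there. Below is a minimal, self-contained sketch of the combinator in action; only the `allOf` body is taken from this diff, while the `Token` namedtuple, the `TokenType` stand-in, and the greedy behaviour assumed for `regexPatternTokenizer`/`keywordTokenizer` are illustrative assumptions, not the real implementations from smnp.token.model and smnp.token.tools.

    import re
    from collections import namedtuple

    # Illustrative stand-ins (assumptions): the real Token model and TokenType
    # enum live in smnp.token.model and smnp.token.type.
    Token = namedtuple("Token", ["type", "value", "pos", "rawValue"])

    class TokenType:
        INTEGER = "INTEGER"
        FLOAT = "FLOAT"

    def regexPatternTokenizer(type, pattern):
        # Assumed behaviour: greedily consume consecutive characters matching
        # `pattern`, returning (consumedChars, Token) or (0, None) on no match.
        def tokenize(input, current, line):
            value = ""
            while current + len(value) < len(input) and re.match(pattern, input[current + len(value)]):
                value += input[current + len(value)]
            if value:
                return (len(value), Token(type, value, (current, line), value))
            return (0, None)
        return tokenize

    def keywordTokenizer(type, keyword):
        # Assumed behaviour: consume `keyword` verbatim if it starts at `current`.
        def tokenize(input, current, line):
            if input.startswith(keyword, current):
                return (len(keyword), Token(type, keyword, (current, line), keyword))
            return (0, None)
        return tokenize

    def allOf(*tokenizers, createToken):
        # As introduced by this diff: every sub-tokenizer must match back to
        # back; any failure rolls the whole composition back to (0, None).
        def combinedTokenizer(input, current, line):
            consumedChars = 0
            tokens = []
            for tokenizer in tokenizers:
                consumed, token = tokenizer(input, current + consumedChars, line)
                if consumed > 0:
                    consumedChars += consumed
                    tokens.append(token)
                else:
                    return (0, None)
            return (consumedChars, createToken((current, line), *tokens))
        return combinedTokenizer

    def createToken(pos, beforeDot, dot, afterDot):
        # The dot token is received but unused; only the digit runs matter.
        rawValue = f"{beforeDot.value}.{afterDot.value}"
        return Token(TokenType.FLOAT, float(rawValue), pos, rawValue)

    floatTokenizer = allOf(
        regexPatternTokenizer(TokenType.INTEGER, r'\d'),
        keywordTokenizer(None, "."),
        regexPatternTokenizer(TokenType.INTEGER, r'\d'),
        createToken=createToken,
    )

    print(floatTokenizer("3.14 + x", 0, 1))  # (4, Token(type='FLOAT', value=3.14, pos=(0, 1), rawValue='3.14'))
    print(floatTokenizer("3 + x", 0, 1))     # (0, None): no dot, so nothing is consumed

Run as-is, the sketch yields a four-character FLOAT token for "3.14" and (0, None) for the dotless input, since `allOf` only succeeds when every sub-tokenizer matches in sequence.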