diff --git a/smnp/ast/node/atom.py b/smnp/ast/node/atom.py
index 45fdff0..1bfbb77 100644
--- a/smnp/ast/node/atom.py
+++ b/smnp/ast/node/atom.py
@@ -44,15 +44,7 @@ class TypeLiteral(Atom):
 
 
 def IntegerParser(input):
-    return Parser.oneOf(
-        Parser.terminal(TokenType.INTEGER, lambda val, pos: IntegerLiteral.withValue(int(val), pos)),
-        Parser.allOf(
-            Parser.terminal(TokenType.MINUS),
-            Parser.terminal(TokenType.INTEGER, lambda val, pos: IntegerLiteral.withValue(int(val), pos)),
-            createNode=lambda minus, integer: IntegerLiteral.withValue(-integer.value, minus.pos),
-            name="negative integer"
-        )
-    )(input)
+    return Parser.terminal(TokenType.INTEGER, createNode=IntegerLiteral.withValue)(input)
 
 
 def StringParser(input):
diff --git a/smnp/token/tokenizer.py b/smnp/token/tokenizer.py
index 0caa830..9495dee 100644
--- a/smnp/token/tokenizer.py
+++ b/smnp/token/tokenizer.py
@@ -8,7 +8,7 @@ from smnp.token.tokenizers.note import noteTokenizer
 from smnp.token.tokenizers.relation import relationOperatorTokenizer
 from smnp.token.tokenizers.string import stringTokenizer
 from smnp.token.tokenizers.whitespace import whitespacesTokenizer
-from smnp.token.tools import defaultTokenizer, separated, regexPatternTokenizer
+from smnp.token.tools import defaultTokenizer, separated, regexPatternTokenizer, mapValue
 from smnp.token.type import TokenType
 
 tokenizers = (
@@ -39,7 +39,7 @@ tokenizers = (
     defaultTokenizer(TokenType.DOT),
 
     # Types
-    separated(regexPatternTokenizer(TokenType.INTEGER, r'\d')),
+    mapValue(separated(regexPatternTokenizer(TokenType.INTEGER, r'\d')), int),
     stringTokenizer,
     noteTokenizer,
     boolTokenizer,
diff --git a/smnp/token/tools.py b/smnp/token/tools.py
index d6ac7ad..297a278 100644
--- a/smnp/token/tools.py
+++ b/smnp/token/tools.py
@@ -50,3 +50,14 @@ def separated(tokenizer, end=r"\W"):
             return (0, None)
 
     return separated
+
+
+def mapValue(tokenizer, mapper):
+    def tokenize(input, current, line):
+        consumedChars, token = tokenizer(input, current, line)
+        if consumedChars > 0:
+            return (consumedChars, Token(token.type, mapper(token.value), token.pos))
+
+        return (0, None)
+
+    return tokenize