diff --git a/smnp/token/tokenizers/string.py b/smnp/token/tokenizers/string.py
index 4135036..69924c8 100644
--- a/smnp/token/tokenizers/string.py
+++ b/smnp/token/tokenizers/string.py
@@ -13,5 +13,5 @@ def stringTokenizer(input, current, line):
             char = input[current + consumedChars]
             value += char
             consumedChars += 1
-        return (consumedChars, Token(TokenType.STRING, value[1:len(value)-1], (line, current)))
+        return (consumedChars, Token(TokenType.STRING, value[1:len(value)-1], (line, current), value))
     return (0, None)