Refactor tokenizer

This commit is contained in:
Bartłomiej Pluta
2019-07-06 22:09:01 +02:00
parent fbb3f79731
commit 756f4544e4
18 changed files with 59 additions and 156 deletions

View File

@@ -1,4 +1,3 @@
from smnp.token.tools import regexPatternTokenizer
def tokenizeWhitespaces(input, current, line):
    """Consume whitespace at *current* in *input* using the shared regex tokenizer.

    Delegates to a throwaway regexPatternTokenizer configured with no token
    type (None) and the whitespace pattern r'\s'.
    """
    matcher = regexPatternTokenizer(None, r'\s')
    return matcher(input, current, line)
whitespacesTokenizer = regexPatternTokenizer(None, r'\s')