Refactor tokenizer

Bartłomiej Pluta
2019-07-03 01:55:08 +02:00
parent 8313d2dcfd
commit f826516d8f
41 changed files with 589 additions and 296 deletions

@@ -0,0 +1,4 @@
from smnp.token.tools import tokenizeRegexPattern

def tokenizeWhitespaces(input, current, line):
    return tokenizeRegexPattern(None, r'\s', input, current, line)
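For context, here is a minimal sketch of how a helper like tokenizeRegexPattern could work, assuming it tries to match the given regex at the current input position and reports how many characters were consumed. The real implementation lives in smnp.token.tools and is not part of this hunk, so the (token, consumed) return shape and the convention that a None token type discards the match are assumptions made purely for illustration.

import re

def tokenizeRegexPattern(tokenType, pattern, input, current, line):
    # Try to match the pattern at the current position of the input string.
    match = re.match(pattern, input[current:])
    if match is None:
        return (None, 0)  # no match: nothing consumed, no token produced
    value = match.group(0)
    if tokenType is None:
        # Assumed convention: a None token type (as used by tokenizeWhitespaces)
        # consumes the matched text without emitting a token.
        return (None, len(value))
    # Otherwise emit a (type, value, line) token plus the consumed length.
    return ((tokenType, value, line), len(value))

Note that with the pattern r'\s' (a single whitespace character), each call to tokenizeWhitespaces consumes at most one character; presumably the main tokenizer loop invokes it repeatedly until no whitespace remains.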