Refactor tokenizer
This commit is contained in:
@@ -1,4 +1,3 @@
|
||||
from smnp.token.tools import regexPatternTokenizer
|
||||
|
||||
def tokenizeWhitespaces(input, current, line):
    r"""Consume whitespace at position *current* of *input*.

    Delegates to a regex-driven tokenizer built for the ``\s`` character
    class. The first argument to ``regexPatternTokenizer`` is ``None`` —
    presumably the token type, meaning whitespace yields no typed token;
    verify against ``smnp.token.tools.regexPatternTokenizer``.
    """
    whitespace = regexPatternTokenizer(None, r'\s')
    return whitespace(input, current, line)
|
||||
# Module-level tokenizer for runs of whitespace (regex class r'\s'),
# constructed once at import time so every caller reuses the same
# tokenizer instead of rebuilding it per call. The None argument is
# presumably the token type (whitespace produces no typed token) —
# confirm against regexPatternTokenizer's signature.
whitespacesTokenizer = regexPatternTokenizer(None, r'\s')
|
||||
|
||||
Reference in New Issue
Block a user