Refactor tokenizer
smnp/token/tokenizers/whitespace.py (new file, +4)
@@ -0,0 +1,4 @@
+from smnp.token.tools import tokenizeRegexPattern
+
+def tokenizeWhitespaces(input, current, line):
+    return tokenizeRegexPattern(None, r'\s', input, current, line)
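
Note: the hunk only shows the call site; tokenizeRegexPattern itself lives in smnp/token/tools.py and is not part of this commit. A rough, hypothetical sketch of a regex-based tokenizer helper of this shape is given below for context; the return convention and token representation are assumptions, not the project's actual code.

import re

# Hypothetical sketch only: the real tokenizeRegexPattern in
# smnp/token/tools.py may use a different return type and token class.
def tokenizeRegexPattern(tokenType, pattern, input, current, line):
    # Try to match the pattern at the current position of the input string.
    match = re.match(pattern, input[current:])
    if match is None:
        # Assumed convention: (number of characters consumed, token or None).
        return (0, None)
    value = match.group(0)
    # tokenizeWhitespaces passes tokenType=None, so the matched whitespace
    # is consumed without emitting a token.
    token = (tokenType, value, line) if tokenType is not None else None
    return (len(value), token)

Under this sketch, r'\s' matches a single whitespace character per call, so the surrounding tokenizer loop would advance one character at a time over runs of whitespace.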