A personal tokenization utility.
npm i -S @kessler/tokenize
const tokenize = require('@kessler/tokenize')

// Tokenize with the default rules.
const tokens1 = tokenize('text')

// With non-default tokenization rules:
//   tokenChars    - characters that become standalone tokens
//   globbingChars - characters that delimit a quoted ("globbed") run
//   separators    - characters that split the input into tokens
const rules = {
  tokenChars: ['(', ')', ',', '[', ']'],
  globbingChars: ['"', '\''],
  separators: [' ']
}
const tokens2 = tokenize('(text) "asd" blabla', rules)
MIT © Yaniv Kessler