// tokenizer-constants.js (file 1 of 2 in this paste)
// Tokenizer finite-state-machine states.
export const STATE_IDENTIFYING = "STATE_IDENTIFYING";
export const STATE_TOKENIZING_NORMAL = "STATE_TOKENIZING_NORMAL";
export const STATE_TOKENIZING_SPECIAL = "STATE_TOKENIZING_SPECIAL";

// Token delimiters: "/" separates tokens; "{…}" wraps special tokens.
export const TOKEN_SEPARATOR = "/";
export const TOKEN_SPECIAL_PREFIX = "{";
export const TOKEN_SPECIAL_SUFFIX = "}";

// Token classification labels emitted by the tokenizer.
export const TOKEN_TYPE_NORMAL = "normal";
export const TOKEN_TYPE_SPECIAL = "special";
// social-tokenizer.js (file 2 of 2 in this paste)
// Tokenizer finite-state-machine states for hashtag/mention scanning.
export const STATE_SEARCHING = "STATE_SEARCHING";
export const STATE_TOKENIZING_HASHTAG = "STATE_TOKENIZING_HASHTAG";
export const STATE_TOKENIZING_MENTION = "STATE_TOKENIZING_MENTION";

// Sigils that introduce a hashtag or mention token.
export const PREFIX_HASHTAG = "#";
export const PREFIX_MENTION = "@";
export const tokenize = function(text) { | |
let characters = [...text] | |
let state = STATE_SEARCHING | |
let tokens = [] |
// (gist-viewer navigation residue removed)