We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent ef44e60 · commit d8d22f6 (Copy full SHA for d8d22f6)
pythainlp/tokenize/longest.py
@@ -172,4 +172,3 @@ def segment(text: str, custom_dict: Trie = DEFAULT_WORD_DICT_TRIE) -> List[str]:
172
_tokenizers[custom_dict_ref_id] = LongestMatchTokenizer(custom_dict)
173
174
return _tokenizers[custom_dict_ref_id].tokenize(text)
175
- # return LongestMatchTokenizer(custom_dict).tokenize(text)
0 commit comments