# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

import aiounittest
from botbuilder.dialogs.choices import Tokenizer


def _assert_token(token, start, end, text, normalized=None):
    assert token.start == start, f"Invalid token.start of '{token.start}' for '{text}' token."
    assert token.end == end, f"Invalid token.end of '{token.end}' for '{text}' token."
    assert token.text == text, f"Invalid token.text of '{token.text}' for '{text}' token."
    # Parenthesize the fallback: without the parentheses this parses as
    # `(token.normalized == normalized) or text`, which is always truthy.
    assert token.normalized == (normalized or text), (
        f"Invalid token.normalized of '{token.normalized}' for '{text}' token."
    )
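
# Note on the expected indices: judging by the values asserted below,
# `token.end` is the inclusive index of a token's final character, so
# 'how' in 'how now brown cow' spans positions 0 through 2.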


class TokenizerTests(aiounittest.AsyncTestCase):
    def test_should_break_on_spaces(self):
        tokens = Tokenizer.default_tokenizer('how now brown cow')
        assert len(tokens) == 4
        _assert_token(tokens[0], 0, 2, 'how')
        _assert_token(tokens[1], 4, 6, 'now')
        _assert_token(tokens[2], 8, 12, 'brown')
        _assert_token(tokens[3], 14, 16, 'cow')

    def test_should_break_on_punctuation(self):
        tokens = Tokenizer.default_tokenizer('how-now.brown:cow?')
        assert len(tokens) == 4
        _assert_token(tokens[0], 0, 2, 'how')
        _assert_token(tokens[1], 4, 6, 'now')
        _assert_token(tokens[2], 8, 12, 'brown')
        _assert_token(tokens[3], 14, 16, 'cow')

    def test_should_tokenize_single_character_tokens(self):
        tokens = Tokenizer.default_tokenizer('a b c d')
        assert len(tokens) == 4
        _assert_token(tokens[0], 0, 0, 'a')
        _assert_token(tokens[1], 2, 2, 'b')
        _assert_token(tokens[2], 4, 4, 'c')
        _assert_token(tokens[3], 6, 6, 'd')

    def test_should_return_a_single_token(self):
        tokens = Tokenizer.default_tokenizer('food')
        assert len(tokens) == 1
        _assert_token(tokens[0], 0, 3, 'food')

    def test_should_return_no_tokens(self):
        tokens = Tokenizer.default_tokenizer('.?-()')
        assert len(tokens) == 0

    def test_should_return_the_normalized_and_original_text_for_a_token(self):
        tokens = Tokenizer.default_tokenizer('fOoD')
        assert len(tokens) == 1
        _assert_token(tokens[0], 0, 3, 'fOoD', 'food')

    def test_should_break_on_emojis(self):
        # Python strings index by code point, so each emoji below occupies
        # a single position (no UTF-16 surrogate pairs to account for).
        tokens = Tokenizer.default_tokenizer('food 💥👍😀')
        assert len(tokens) == 4
        _assert_token(tokens[0], 0, 3, 'food')
        _assert_token(tokens[1], 5, 5, '💥')
        _assert_token(tokens[2], 6, 6, '👍')
        _assert_token(tokens[3], 7, 7, '😀')
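

# A minimal manual check, assuming only the API the tests above already use
# (Tokenizer.default_tokenizer and the Token fields start/end/text/normalized).
# Run this file directly to print the tokens for a sample utterance:
if __name__ == "__main__":
    for tok in Tokenizer.default_tokenizer('how now brown cow'):
        print(tok.start, tok.end, tok.text, tok.normalized)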