-
Notifications
You must be signed in to change notification settings - Fork 34
/
lexer_test.exs
38 lines (34 loc) · 979 Bytes
/
lexer_test.exs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
defmodule MakeupTest.LexerTest do
  @moduledoc false
  use ExUnit.Case, async: true

  import ExUnitProperties

  alias Makeup.Lexer
  alias MakeupTest.Lexer.LexerStreamDataGenerators, as: Gen

  describe "unlex" do
    test "unlex single token" do
      assert Lexer.unlex([{:x, %{}, "abc"}]) == "abc"
    end

    test "unlex multiple tokens" do
      tokens = [
        {:a, %{}, "abc"},
        {:b, %{}, "def"}
      ]

      assert Lexer.unlex(tokens) == "abcdef"
    end
  end

  describe "split into lines" do
    test "after splitting, token values contain no newline characters" do
      check all tokens <- Gen.tokens() do
        lines = Lexer.split_into_lines(tokens)

        # Each line produced by the splitter must itself be a list of tokens.
        assert Enum.all?(lines, &is_list/1)

        # No token value in any split line may still contain a newline
        # (the splitter is expected to break tokens at "\n" boundaries).
        for line <- lines, {_type, _attrs, value} <- line do
          refute String.contains?(value, "\n")
        end
      end
    end
  end
end