
Commit 2e1e8e7

Author: Kristoffer Carlsson

get rid of unused fields and functions in the lexer (#56)

1 parent d3d8fde · commit 2e1e8e7

2 files changed (+4, -34 lines)

src/tokenize.jl

Lines changed: 4 additions & 32 deletions
@@ -77,24 +77,14 @@ Ideally a lexer is stateless but some state is needed here for:
 """
 mutable struct Lexer{IO_t <: IO}
     io::IO_t
-    io_startpos::Int
 
-    token_start_row::Int
-    token_start_col::Int
     token_startpos::Int
 
-    current_row::Int
-    current_col::Int
-    current_pos::Int
-
     last_token::Kind
     string_states::Vector{StringState}
-    charstore::IOBuffer
     chars::Tuple{Char,Char,Char,Char}
     charspos::Tuple{Int,Int,Int,Int}
-    doread::Bool
     dotop::Bool
-    errored::Bool
 end
 
 function Lexer(io::IO)
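
For reference, stitching together the surviving context lines gives the struct as it stands after this commit (a reconstruction from the hunk above, not a verbatim copy of the file):

mutable struct Lexer{IO_t <: IO}
    io::IO_t
    token_startpos::Int

    last_token::Kind
    string_states::Vector{StringState}
    chars::Tuple{Char,Char,Char,Char}
    charspos::Tuple{Int,Int,Int,Int}
    dotop::Bool
end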
@@ -121,9 +111,9 @@ function Lexer(io::IO)
             end
         end
     end
-    Lexer(io, position(io), 1, 1, position(io), 1, 1, position(io),
-          K"error", Vector{StringState}(), IOBuffer(),
-          (c1,c2,c3,c4), (p1,p2,p3,p4), false, false, false)
+    Lexer(io, position(io),
+          K"error", Vector{StringState}(),
+          (c1,c2,c3,c4), (p1,p2,p3,p4), false)
 end
 Lexer(str::AbstractString) = Lexer(IOBuffer(str))

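A rough usage sketch of the slimmed-down constructor (hedged: it leans only on names visible in this diff — `Lexer`, the string convenience constructor, and the iteration protocol defined via `Base.iterate`):

# Build a lexer from a string; Lexer(str::AbstractString) wraps it in an IOBuffer.
l = Lexer("x + 1")

# Base.iterate calls next_token until it emits a K"EndMarker" token,
# so a plain for loop visits every token in the input.
for t in l
    println(t)
end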
@@ -143,14 +133,8 @@ Base.eltype(::Type{<:Lexer}) = Token
 
 
 function Base.iterate(l::Lexer)
-    seekstart(l)
     l.token_startpos = position(l)
-    l.token_start_row = 1
-    l.token_start_col = 1
 
-    l.current_row = 1
-    l.current_col = 1
-    l.current_pos = l.io_startpos
     t = next_token(l)
     return t, t.kind == K"EndMarker"
 end
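
One behavioral consequence worth noting: with `seekstart(l)` gone, `Base.iterate` no longer rewinds the underlying IO and instead starts from the lexer's current position. A hedged illustration, assuming `collect` over the iteration protocol above:

l = Lexer("a + b")
first_pass  = collect(l)  # consumes the stream through the K"EndMarker" token
second_pass = collect(l)  # no rewind anymore: yields just an end-of-stream token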
@@ -179,15 +163,6 @@ Set a new starting position.
 """
 startpos!(l::Lexer, i::Integer) = l.token_startpos = i
 
-Base.seekstart(l::Lexer) = seek(l.io, l.io_startpos)
-
-"""
-    seek2startpos!(l::Lexer)
-
-Sets the lexer's current position to the beginning of the latest `Token`.
-"""
-seek2startpos!(l::Lexer) = seek(l, startpos(l))
-
 """
     peekchar(l::Lexer)
 
@@ -220,7 +195,7 @@ Base.position(l::Lexer) = l.charspos[1]
     eof(l::Lexer)
 
 Determine whether the end of the lexer's underlying buffer has been reached.
-"""# Base.position(l::Lexer) = Base.position(l.io)
+"""
 Base.eof(l::Lexer) = eof(l.io)
 
 Base.seek(l::Lexer, pos) = seek(l.io, pos)
@@ -233,8 +208,6 @@ position.
 """
 function start_token!(l::Lexer)
     l.token_startpos = l.charspos[1]
-    l.token_start_row = l.current_row
-    l.token_start_col = l.current_col
 end
 
 """
@@ -312,7 +285,6 @@ end
 Returns an `K"error"` token with error `err` and starts a new `Token`.
 """
 function emit_error(l::Lexer, err::Kind = K"error")
-    l.errored = true
     @assert is_error(err)
     return emit(l, err)
 end
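
With the `errored` flag removed from the struct, an error is now observable only through the kind of the returned token. A hedged caller-side sketch (assuming `is_error` accepts a token's kind, as in the `@assert` above):

src = "some input"                # hypothetical source text
for t in Lexer(src)
    if is_error(t.kind)           # error state now lives only on the token
        # handle the error token; the Lexer itself no longer tracks `errored`
    end
end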

test/tokenize.jl

Lines changed: 0 additions & 2 deletions
@@ -25,13 +25,11 @@ strtok(str) = untokenize.(collect(tokenize(str)), str)
         l = tokenize(s)
         @test Tokenize.readchar(l) == 'a'
 
-        # @test l.current_pos == 0
         l_old = l
         @test l == l_old
         @test Tokenize.eof(l)
         @test Tokenize.readchar(l) == Tokenize.EOF_CHAR
 
-        # @test l.current_pos == 0
     end
 end # testset
