-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtoken.go
64 lines (55 loc) · 1.1 KB
/
token.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
package lexer
import "github.com/tekwizely/go-parsing/lexer/token"
// Pre-defined token types. User-defined token types should start at
// TStart + iota to avoid colliding with these built-ins.
//
const (
	// TLexErr represents a Lexer error.
	//
	TLexErr token.Type = iota
	// TUnknown represents unknown rune(s) the lexer could not classify.
	//
	TUnknown
	// TEof represents end of file.
	//
	TEof
	// TStart is a marker for user tokens ( use TStart + iota ).
	//
	TStart
	// tEnd is an internal marker, unexported so users cannot emit it.
	//
	tEnd
)
// _token is the internal structure that backs the lexer's Token interface.
//
type _token struct {
	typ    token.Type // token class (one of the T* constants, or user-defined)
	value  string     // matched text for this token
	line   int        // 1-based(?) line where the token starts — NOTE(review): confirm base against lexer position tracking
	column int        // column where the token starts — NOTE(review): confirm rune vs byte offset
}
// newToken constructs a *_token populated with the given type, matched
// value, and source position.
//
func newToken(typ token.Type, value string, line int, column int) *_token {
	tok := _token{
		typ:    typ,
		value:  value,
		line:   line,
		column: column,
	}
	return &tok
}
// Type implements Token.Type(), returning the token's type.
//
func (t *_token) Type() token.Type { return t.typ }
// Value implements Token.Value(), returning the token's matched text.
//
func (t *_token) Value() string { return t.value }
// Line implements Token.Line(), returning the line the token starts on.
//
func (t *_token) Line() int { return t.line }
// Column implements Token.Column(), returning the column the token starts on.
//
func (t *_token) Column() int { return t.column }
// eof reports whether this token marks end-of-file, i.e. its type is TEof.
//
func (t *_token) eof() bool {
	return t.typ == TEof
}