Revert #128, update docs and add tests to Tokenizer.Remaining() (#129)
Steve van Loben Sels authored Nov 10, 2022
1 parent b2d0aeb · commit 3b49d71
Showing 2 changed files with 10 additions and 18 deletions.
json/token.go: 25 changes (8 additions, 17 deletions)
@@ -43,17 +43,6 @@ type Tokenizer struct {
// null, true, false, numbers, or quoted strings.
Value RawValue

// Position is the Tokenizer's current index into the underlying byte slice.
// Since the Tokenizer has already been advanced by calling Next, this
// position will be the first index of the next token. The position of
// the current Value can be calculated by subtracting len(token.value).
// Accordingly, slicing the underlying bytes like:
//
// b[token.Position-len(token.Value):token.Position]
//
// will yield the current Value.
Position int

// When the tokenizer has encountered invalid content this field is not nil.
Err error

@@ -102,7 +91,6 @@ func (t *Tokenizer) Reset(b []byte) {
// However, it does not compile down to an invocation of duff-copy.
t.Delim = 0
t.Value = nil
t.Position = 0
t.Err = nil
t.Depth = 0
t.Index = 0
@@ -139,16 +127,13 @@ skipLoop:

if i > 0 {
t.json = t.json[i:]
t.Position += i
}

if len(t.json) == 0 {
t.Reset(nil)
return false
}

lenBefore := len(t.json)

var kind Kind
switch t.json[0] {
case '"':
@@ -179,8 +164,6 @@ skipLoop:
t.Value, t.json, t.Err = t.json[:1], t.json[1:], syntaxError(t.json, "expected token but found '%c'", t.json[0])
}

t.Position += lenBefore - len(t.json)

t.Depth = t.depth()
t.Index = t.index()
t.flags = t.flags.withKind(kind)
@@ -319,6 +302,14 @@ func (t *Tokenizer) String() []byte {
}

// Remaining returns the number of bytes left to parse.
//
// The position of the tokenizer's current Value within the original byte slice
// can be calculated like so:
//
// end := len(b) - tok.Remaining()
// start := end - len(tok.Value)
//
// And slicing b[start:end] will yield the tokenizer's current Value.
func (t *Tokenizer) Remaining() int {
return len(t.json)
}
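
For illustration only (not part of the commit), here is a minimal sketch of how a caller can recover a token's position with Remaining() now that the Position field has been reverted. It assumes the package is imported from github.com/segmentio/encoding/json; the input document and printed output are purely illustrative.

	package main

	import (
		"fmt"

		"github.com/segmentio/encoding/json"
	)

	func main() {
		b := []byte(`{"answer":42,"ok":true}`)
		tok := json.NewTokenizer(b)

		for tok.Next() {
			// Remaining reports how many bytes are left to parse, so the end
			// of the current Value is the original length minus what remains,
			// and the start is that end minus the Value's length.
			end := len(b) - tok.Remaining()
			start := end - len(tok.Value)
			fmt.Printf("%s spans b[%d:%d]\n", tok.Value, start, end)
		}
	}

The updated tokenize helper in json/token_test.go below exercises exactly this computation.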
json/token_test.go: 3 changes (2 additions, 1 deletion)
@@ -45,7 +45,8 @@ func tokenize(t *testing.T, b []byte) (tokens []token) {
tok := NewTokenizer(b)

for tok.Next() {
start, end := tok.Position-len(tok.Value), tok.Position
end := len(b) - tok.Remaining()
start := end - len(tok.Value)
if end > len(b) {
t.Fatalf("token position too far [%d:%d], len(b) is %d", start, end, len(b))
}
