Skip to content

Commit

Permalink
Simpler not to special-case consecutive _s.
Browse files Browse the repository at this point in the history
  • Loading branch information
seizethedave committed Jun 23, 2024
1 parent f4d4c5f commit f10caa0
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 13 deletions.
12 changes: 0 additions & 12 deletions internal/parser/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -551,14 +551,8 @@ outerLoop:
case numAfterUnderscore:
// The only valid transition out of _ is to a digit.
switch {
case r == '_':
return l.makeStaticErrorPoint(
"Couldn't lex number, multiple consecutive _'s",
l.location())

case r >= '0' && r <= '9':
state = numAfterOneToNine

default:
return l.makeStaticErrorPoint(
fmt.Sprintf("Couldn't lex number, junk after '_': %v", strconv.QuoteRuneToASCII(r)),
Expand Down Expand Up @@ -597,14 +591,8 @@ outerLoop:
case numAfterExpUnderscore:
// The only valid transition out of _ is to a digit.
switch {
case r == '_':
return l.makeStaticErrorPoint(
"Couldn't lex number, multiple consecutive _'s",
l.location())

case r >= '0' && r <= '9':
state = numAfterExpDigit

default:
return l.makeStaticErrorPoint(
fmt.Sprintf("Couldn't lex number, junk after '_': %v", strconv.QuoteRuneToASCII(r)),
Expand Down
2 changes: 1 addition & 1 deletion internal/parser/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ func TestNumberSeparators(t *testing.T) {
{"1.1e-10_1", "", Tokens{{kind: tokenNumber, data: "1.1e-101"}}},
{"9.109_383_56e-31", "", Tokens{{kind: tokenNumber, data: "9.10938356e-31"}}},
{"123456_!", "snippet:1:8 Couldn't lex number, junk after '_': '!'", Tokens{}},
{"123__456", "snippet:1:5 Couldn't lex number, multiple consecutive _'s", Tokens{}},
{"123__456", "snippet:1:5 Couldn't lex number, junk after '_': '_'", Tokens{}},
{"1_200_.0", "snippet:1:7 Couldn't lex number, junk after '_': '.'", Tokens{}},
{"1_200._0", "snippet:1:7 Couldn't lex number, junk after decimal point: '_'", Tokens{}},
{"1_200_e2", "snippet:1:7 Couldn't lex number, junk after '_': 'e'", Tokens{}},
Expand Down

0 comments on commit f10caa0

Please sign in to comment.