@@ -141,7 +141,7 @@ describe 'Go grammar', ->
141141
142142  it  ' tokenizes types' -> 
143143    types  =  [
144-       ' chan ' ,    ' map ' ,      ' bool'   ' string' ' error'    ' int'     ' int8' ' int16' 
144+       ' bool' ' string' ' error' ' int' ' int8'        ' int16' 
145145      ' int32' ' int64' ' rune' ' byte' ' uint' ' uint8' ' uint16' ' uint32' 
146146      ' uint64' ' uintptr' ' float32' ' float64' ' complex64' ' complex128' 
147147    ]
@@ -185,7 +185,7 @@ describe 'Go grammar', ->
185185
186186      next  =  tokens[t .tokenPos  +  1 ]
187187      expect (next .value ).toEqual  ' (' 
188-       expect (next .scopes ).toEqual  [' source.go' ' keyword.operator.bracket .go' 
188+       expect (next .scopes ).toEqual  [' source.go' ' meta.brace.round .go' 
189189
190190  it  ' only tokenizes "func" when it is an exact match' -> 
191191    tests  =  [' myfunc' ' funcMap' 
@@ -196,28 +196,33 @@ describe 'Go grammar', ->
196196
197197  it  ' tokenizes func names in their declarations' -> 
198198    tests  =  [
199-       {
200-         ' line' :  ' func f()' 
201-         ' tokenPos' :  2 
202-       }
199+       #   {
200+       #     'line': 'func f()'
201+       #     'tokenPos': 2
202+       #   }
203203      {
204204        ' line' :  ' func (T) f()' 
205205        ' tokenPos' :  6 
206206      }
207-       {
208-         ' line' :  ' func (t T) f()' 
209-         ' tokenPos' :  8 
210-       }
211-       {
212-         ' line' :  ' func (t *T) f()' 
213-         ' tokenPos' :  9 
214-       }
207+       #   {
208+       #     'line': 'func (t T) f()'
209+       #     'tokenPos': 8
210+       #   }
211+       #   {
212+       #     'line': 'func (t *T) f()'
213+       #     'tokenPos': 9
214+       #   }
215215    ]
216216
217217    for  t in  tests
218218      {tokens } =  grammar .tokenizeLine  t .line 
219219      expect (tokens[0 ].value ).toEqual  ' func' 
220220      expect (tokens[0 ].scopes ).toEqual  [' source.go' ' keyword.go' 
221+       expect (tokens[1 ].value ).toEqual  '  ' 
222+       expect (tokens[2 ].value ).toEqual  ' (' 
223+       expect (tokens[3 ].value ).toEqual  ' T' 
224+       expect (tokens[4 ].value ).toEqual  ' )' 
225+       expect (tokens[5 ].value ).toEqual  '  ' 
221226
222227      relevantToken  =  tokens[t .tokenPos ]
223228      expect (relevantToken).toBeDefined ()
@@ -226,7 +231,7 @@ describe 'Go grammar', ->
226231
227232      next  =  tokens[t .tokenPos  +  1 ]
228233      expect (next .value ).toEqual  ' (' 
229-       expect (next .scopes ).toEqual  [' source.go' ' keyword.operator.bracket .go' 
234+       expect (next .scopes ).toEqual  [' source.go' ' meta.brace.round .go' 
230235
231236  it  ' tokenizes receiver types in method declarations' -> 
232237    tests  =  [
@@ -256,7 +261,7 @@ describe 'Go grammar', ->
256261
257262      next  =  tokens[t .tokenPos  +  1 ]
258263      expect (next .value ).toEqual  ' )' 
259-       expect (next .scopes ).toEqual  [' source.go' ' keyword.operator.bracket .go' 
264+       expect (next .scopes ).toEqual  [' source.go' ' meta.brace.round .go' 
260265
261266  it  ' tokenizes numerics' -> 
262267    numerics  =  [
@@ -300,7 +305,7 @@ describe 'Go grammar', ->
300305    opers  =  [
301306      ' +' ' &' ' +=' ' &=' ' &&' ' ==' ' !=' ' -' ' |' ' -=' ' |=' ' ||' ' <' 
302307      ' <=' ' *' ' ^' ' *=' ' ^=' ' <-' ' >' ' >=' ' /' ' <<' ' /=' 
303-       ' <<=' ' ++' ' =' ' :=' ' ; ' ,  ' %' ' >>' ' %=' ' >>=' ' --' ' !' ' ...' 
308+       ' <<=' ' ++' ' =' ' :=' ' %' ' >>' ' %=' ' >>=' ' --' ' !' ' ...' 
304309      ' &^' ' &^=' 
305310    ]
306311
@@ -317,33 +322,56 @@ describe 'Go grammar', ->
317322
318323  it  ' tokenizes bracket operators' -> 
319324    opers  =  [
320-       ' [' ' ]' ' (' ' )' ' {' ' }' 
325+       {
326+         values :  [ ' [' ' ]' 
327+         scope :  ' meta.brace.square.go' 
328+       }
329+       {
330+         values :  [ ' (' ' )' 
331+         scope :  ' meta.brace.round.go' 
332+       }
333+       {
334+         values :  [ ' {' ' }' 
335+         scope :  ' meta.brace.curly.go' 
336+       }
321337    ]
322338
323-     for  op in  opers
324-       {tokens } =  grammar .tokenizeLine  op
339+     for  ops in  opers
340+       for  op in  ops
341+         {tokens } =  grammar .tokenizeLine  op
325342
326-       fullOp  =  tokens .map ((tok ) ->  tok .value ).join (' ' 
327-       expect (fullOp).toEqual  op
343+          fullOp  =  tokens .map ((tok ) ->  tok .value ).join (' ' 
344+          expect (fullOp).toEqual  op
328345
329-       scopes  =  tokens .map  (tok) ->  tok .scopes 
330-       allKeywords  =  scopes .every  (scope) ->  ' keyword.operator.bracket.go ' in  scope
346+          scopes  =  tokens .map  (tok) ->  tok .scopes 
347+          allKeywords  =  scopes .every  (scope) ->  ops . scope  in  scope
331348
332-       expect (allKeywords).toBe  true 
349+          expect (allKeywords).toBe  true 
333350
334351  it  ' tokenizes punctuation operators' -> 
335352    opers  =  [
336-       ' .' ' ,' ' :' 
353+       {
354+         value :  ' ,' 
355+         scope :  ' meta.delimiter.comma.go' 
356+       }
357+       {
358+         value :  ' .' 
359+         scope :  ' meta.delimiter.period.go' 
360+       }
361+       {
362+         value :  ' :' 
363+         scope :  ' meta.delimiter.colon.go' 
364+       }
337365    ]
338366
339367    for  op in  opers
340-       {tokens } =  grammar .tokenizeLine  op
368+       {tokens } =  grammar .tokenizeLine  op . value 
341369
342370      fullOp  =  tokens .map ((tok ) ->  tok .value ).join (' ' 
343-       expect (fullOp).toEqual  op
371+       expect (fullOp).toEqual  op . value 
344372
345373      scopes  =  tokens .map  (tok) ->  tok .scopes 
346-       allKeywords  =  scopes .every  (scope) ->  ' keyword.operator.punctuation.go ' in  scope
374+       allKeywords  =  scopes .every  (scope) ->  op . scope  in  scope
347375
348376      expect (allKeywords).toBe  true 
349377
@@ -400,7 +428,7 @@ describe 'Go grammar', ->
400428
401429        next  =  tokens[t .tokenPos  +  1 ]
402430        expect (next .value ).toEqual  ' (' 
403-         expect (next .scopes ).toEqual  [' source.go' ' keyword.operator.bracket .go' 
431+         expect (next .scopes ).toEqual  [' source.go' ' meta.brace.round .go' 
404432      else 
405433        expect (relevantToken .scopes ).not .toEqual  want
406434
@@ -409,23 +437,23 @@ describe 'Go grammar', ->
409437      expect (token .value ).toBe  ' var' 
410438      expect (token .scopes ).toEqual  [' source.go' ' keyword.go' 
411439
412-     wantedScope  =  [' source.go' ' variable.go' 
440+     plainScope  =  [' source.go' ' variable.go' 
413441
414442    testName  =  (token , name ) -> 
415443      expect (token .value ).toBe  name
416-       expect (token .scopes ).toEqual  wantedScope 
444+       expect (token .scopes ).toEqual  plainScope 
417445
418446    testOp  =  (token , op ) -> 
419447      expect (token .value ).toBe  op
420448      expect (token .scopes ).toEqual  [' source.go' ' keyword.operator.go' 
421449
422450    testOpBracket  =  (token , op ) -> 
423451      expect (token .value ).toBe  op
424-       expect (token .scopes ).toEqual  [' source.go' ' keyword.operator.bracket .go' 
452+       expect (token .scopes ).toEqual  [' source.go' ' meta.brace.round .go' 
425453
426454    testOpPunctuation  =  (token , op ) -> 
427455      expect (token .value ).toBe  op
428-       expect (token .scopes ).toEqual  [' source.go' ' keyword.operator.punctuation .go' 
456+       expect (token .scopes ).toEqual  [' source.go' ' meta.delimiter.comma .go' 
429457
430458    testType  =  (token , name ) -> 
431459      expect (token .value ).toBe  name
0 commit comments