@@ -301,7 +301,7 @@ describe 'Rust grammar', ->
      expect(tokens[2]).toEqual value: ' text', scopes: ['source.rust']

  it 'tokenizes keywords', ->
-    for t in ['crate', 'extern', 'mod', 'let', 'proc', 'ref', 'use', 'super', 'as', 'move']
+    for t in ['crate', 'extern', 'mod', 'let', 'proc', 'ref', 'use', 'super', 'move']
      {tokens} = grammar.tokenizeLine("text #{t} text")
      expect(tokens[0]).toEqual value: 'text ', scopes: ['source.rust']
      expect(tokens[1]).toEqual value: t, scopes: ['source.rust', 'keyword.other.rust']
@@ -630,3 +630,11 @@ describe 'Rust grammar', ->
    expect(tokens[8][0]).toEqual value: '_a0', scopes: ['source.rust', 'entity.name.function.rust']
    expect(tokens[9][0]).toEqual value: '_0a', scopes: ['source.rust', 'entity.name.function.rust']
    expect(tokens[10][0]).toEqual value: '__', scopes: ['source.rust', 'entity.name.function.rust']
+
+  it 'tokenizes `as` as an operator (issue \#110)', ->
+    {tokens} = grammar.tokenizeLine('let i = 10 as f32;')
+    expect(tokens[0]).toEqual value: 'let', scopes: ['source.rust', 'keyword.other.rust']
+    expect(tokens[2]).toEqual value: '=', scopes: ['source.rust', 'keyword.operator.assignment.rust']
+    expect(tokens[4]).toEqual value: '10', scopes: ['source.rust', 'constant.numeric.integer.decimal.rust']
+    expect(tokens[6]).toEqual value: 'as', scopes: ['source.rust', 'keyword.operator.misc.rust']
+    expect(tokens[8]).toEqual value: 'f32', scopes: ['source.rust', 'storage.type.core.rust']
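For context, a minimal sketch of the kind of grammar rule the new spec exercises, assuming the corresponding change lands in grammars/rust.cson. The regex and comment text are illustrative guesses; only the keyword.operator.misc.rust scope name is taken from the spec above.

    # Hypothetical rule: scope `as` as an operator instead of leaving it in the
    # plain keyword list (keyword.other.rust), so casts like `10 as f32` match it.
    {
      comment: 'Cast operator (assumed pattern)'
      match: '\\b(as)\\b'
      name: 'keyword.operator.misc.rust'
    }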