This repository was archived by the owner on Jan 25, 2022. It is now read-only.

Commit d4a0a7f

Recognize reserved keywords (closes #94)
1 parent 0069d1f commit d4a0a7f

2 files changed: +21 -2 lines changed
grammars/rust.cson

Lines changed: 6 additions & 1 deletion
@@ -280,7 +280,12 @@
   {
     'comment': 'Keyword'
     'name': 'keyword.other.rust'
-    'match': '\\b(crate|extern|mod|let|proc|ref|use|super|move)\\b'
+    'match': '\\b(crate|extern|mod|let|ref|use|super|move)\\b'
+  }
+  {
+    'comment': 'Reserved keyword'
+    'name': 'invalid.deprecated.rust'
+    'match': '\\b(abstract|alignof|become|do|final|macro|offsetof|override|priv|proc|pure|sizeof|typeof|virtual|yield)\\b'
   }
   { 'include': '#unsafe' }
   { 'include': '#sigils' }
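
The net effect of this hunk is that 'proc' moves out of the ordinary keyword rule and is instead caught by the new reserved-keyword rule. A minimal sketch of what that means for tokenization, written against the same grammar.tokenizeLine pattern the specs below use (the surrounding setup, including the grammar variable, is assumed from spec/rust-spec.coffee; this is an illustration, not part of the commit):

    # 'proc' is no longer scoped as keyword.other.rust; it now picks up the
    # reserved-keyword scope introduced by this hunk.
    {tokens} = grammar.tokenizeLine('text proc text')
    expect(tokens[1]).toEqual value: 'proc', scopes: ['source.rust', 'invalid.deprecated.rust']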

spec/rust-spec.coffee

Lines changed: 15 additions & 1 deletion
@@ -301,12 +301,19 @@ describe 'Rust grammar', ->
     expect(tokens[2]).toEqual value: ' text', scopes: ['source.rust']
 
   it 'tokenizes keywords', ->
-    for t in ['crate', 'extern', 'mod', 'let', 'proc', 'ref', 'use', 'super', 'move']
+    for t in ['crate', 'extern', 'mod', 'let', 'ref', 'use', 'super', 'move']
       {tokens} = grammar.tokenizeLine("text #{t} text")
       expect(tokens[0]).toEqual value: 'text ', scopes: ['source.rust']
       expect(tokens[1]).toEqual value: t, scopes: ['source.rust', 'keyword.other.rust']
       expect(tokens[2]).toEqual value: ' text', scopes: ['source.rust']
 
+  it 'tokenizes reserved keywords', ->
+    for t in ['abstract', 'alignof', 'become', 'do', 'final', 'macro', 'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof', 'typeof', 'virtual', 'yield']
+      {tokens} = grammar.tokenizeLine("text #{t} text")
+      expect(tokens[0]).toEqual value: 'text ', scopes: ['source.rust']
+      expect(tokens[1]).toEqual value: t, scopes: ['source.rust', 'invalid.deprecated.rust']
+      expect(tokens[2]).toEqual value: ' text', scopes: ['source.rust']
+
   it 'tokenizes unsafe keyword', ->
     {tokens} = grammar.tokenizeLine('text unsafe text')
     expect(tokens[0]).toEqual value: 'text ', scopes: ['source.rust']
@@ -638,3 +645,10 @@ describe 'Rust grammar', ->
     expect(tokens[4]).toEqual value: '10', scopes: ['source.rust', 'constant.numeric.integer.decimal.rust']
     expect(tokens[6]).toEqual value: 'as', scopes: ['source.rust', 'keyword.operator.misc.rust']
     expect(tokens[8]).toEqual value: 'f32', scopes: ['source.rust', 'storage.type.core.rust']
+
+  it 'tokenizes a reserved keyword as deprecated (issue \\#94)', ->
+    {tokens} = grammar.tokenizeLine('let priv = 10;')
+    expect(tokens[0]).toEqual value: 'let', scopes: ['source.rust', 'keyword.other.rust']
+    expect(tokens[2]).toEqual value: 'priv', scopes: ['source.rust', 'invalid.deprecated.rust']
+    expect(tokens[4]).toEqual value: '=', scopes: ['source.rust', 'keyword.operator.assignment.rust']
+    expect(tokens[6]).toEqual value: '10', scopes: ['source.rust', 'constant.numeric.integer.decimal.rust']
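
One behaviour the new specs do not exercise, but which follows from the \b anchors in the added pattern, is that reserved words embedded inside longer identifiers are left alone. A small sketch in the same spec style (it assumes the spec file's existing grammar setup and is an illustration, not an assertion made by this commit):

    # 'override_flag' contains the reserved word 'override', but \b requires a
    # word boundary, so no token should pick up the reserved-keyword scope.
    {tokens} = grammar.tokenizeLine('let override_flag = 1;')
    expect(t.scopes).not.toContain('invalid.deprecated.rust') for t in tokens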
