Skip to content
This repository has been archived by the owner on Dec 15, 2022. It is now read-only.

Commit

Permalink
Merge pull request #142 from octref/custom-at-rules
Browse files · Browse the repository at this point in the history
Handle syntax highlighting of custom at-rules for microsoft/vscode-css-languageservice#51
Nathan Sobo authored Apr 4, 2019
2 parents 8d1d690 + eb0a5fd commit 6abc0a2
Show / hide file tree
Showing 2 changed files with 37 additions and 4 deletions.
33 changes: 33 additions & 0 deletions grammars/css.cson
Original file line number Diff line number Diff line change
Expand Up @@ -584,6 +584,39 @@
'include': '#string'
}
]
},
{
# @custom-at-rule
'begin': '(?i)(?=@[\\w-]+(\\s|\\(|/\\*|$))'
'end': '(?<=})(?!\\G)'
'patterns': [
{
'begin': '(?i)\\G(@)[\\w-]+'
'beginCaptures':
'0':
'name': 'keyword.control.at-rule.css'
'1':
'name': 'punctuation.definition.keyword.css'
'end': '(?=\\s*[{;])'
'name': 'meta.at-rule.header.css'
}
{
'begin': '{'
'beginCaptures':
'0':
'name': 'punctuation.section.begin.bracket.curly.css'
'end': '}'
'endCaptures':
'0':
'name': 'punctuation.section.end.bracket.curly.css'
'name': 'meta.at-rule.body.css'
'patterns': [
{
'include': '$self'
}
]
}
]
}
]
'color-keywords':
Expand Down
8 changes: 4 additions & 4 deletions spec/css-spec.coffee
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,8 @@ describe 'CSS grammar', ->
it 'does not tokenise identifiers following an @ symbol', ->
{tokens} = grammar.tokenizeLine('@some-weird-new-feature')
expect(tokens[0]).toEqual value: '@', scopes: ['source.css']
expect(tokens[1]).toEqual value: 'some-weird-new-feature', scopes: ['source.css', 'meta.selector.css']
expect(tokens[0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css', 'punctuation.definition.keyword.css']
expect(tokens[1]).toEqual value: 'some-weird-new-feature', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css']
it 'does not tokenise identifiers in unfamiliar functions', ->
{tokens} = grammar.tokenizeLine('some-edgy-new-function()')
Expand Down Expand Up @@ -621,8 +621,8 @@ describe 'CSS grammar', ->
expect(lines[0][0]).toEqual value: '/*', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.begin.css']
expect(lines[0][1]).toEqual value: ' Not the first line ', scopes: ['source.css', 'comment.block.css']
expect(lines[0][2]).toEqual value: '*/', scopes: ['source.css', 'comment.block.css', 'punctuation.definition.comment.end.css']
expect(lines[1][0]).toEqual value: '@', scopes: ['source.css']
expect(lines[1][1]).toEqual value: 'charset "UTF-8";', scopes: ['source.css', 'meta.selector.css']
expect(lines[1][0]).toEqual value: '@', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css', 'punctuation.definition.keyword.css']
expect(lines[1][1]).toEqual value: 'charset', scopes: ['source.css', 'meta.at-rule.header.css', 'keyword.control.at-rule.css']
it 'highlights invalid @charset statements', ->
lines = grammar.tokenizeLines " @charset 'US-ASCII';"
Expand Down

0 comments on commit 6abc0a2

Please sign in to comment.