Field syntax highlighting improvements and tests.
ivanz committed Dec 5, 2016
1 parent 32d3ef4 commit 5ca75df
Showing 4 changed files with 188 additions and 19 deletions.
65 changes: 56 additions & 9 deletions syntaxes/csharp.json
@@ -71,6 +71,43 @@
}
]
},
"field-declaration": {
"patterns": [
{
"begin": "(?=(?:(?:(?:private|public|volatile|internal|protected|static|readonly|const)\\s*)*)(?:[\\w\\s,<>\\[\\]]+?)(?:[\\w]+)\\s*(?:;|=|=>))",
"end": "(?=;)",
"patterns": [
{
"match": "^\\s*((?:(?:private|public|volatile|internal|protected|static|readonly|const)\\s*)*)\\s*([\\w\\s,<>\\[\\]]+?)\\s*([\\w]+)\\s*(?=;|=)",
"captures": {
"1" : {
"patterns": [
{
"include": "#storage-modifiers"
}
]
},
"2" : {
"name": "storage.type.cs"
},
"3": {
"name": "entity.name.variable.cs"
}
}
},
{
"begin": "(?==>?)",
"end": "(?=;)",
"patterns": [
{
"include": "#code"
}
]
}
]
}
]
},
"variable": {
"patterns": [
{
@@ -123,8 +160,8 @@
}
]
},
"genericConstraints": {
"begin": "(where)\\s*(\\w+)\\s*:",
"generic-constraints": {
"begin": "(where)\\s+(\\w+)\\s*:",
"end": "(?={)",
"beginCaptures": {
"1": {
@@ -148,15 +185,15 @@
}
},
{
"match": "([\\w<>]+)\\s*(?=,|where|{)",
"match": "([\\w<>,\\[\\]]+)\\s*(?=,|where|{)",
"captures": {
"1": {
"name": "storage.type.cs"
}
}
},
{
"include": "#genericConstraints"
"include": "#generic-constraints"
}
]
},
@@ -203,7 +240,7 @@
]
},
{
"include": "#genericConstraints"
"include": "#generic-constraints"
},
{
"begin": "{",
@@ -216,15 +253,25 @@
"name": "meta.class.body.cs",
"patterns": [
{
"include": "#method"
},
{
"include": "#code"
"include": "#type-body"
}
]
}
]
},
"type-body": {
"patterns": [
{
"include": "#field-declaration"
},
{
"include": "#method"
},
{
"include": "#code"
}
]
},
"code": {
"patterns": [
{
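Below is a minimal sketch (not part of the commit) of how the new field-declaration rule can be spot-checked with the test helpers used in the files that follow (TokenizerUtil.tokenize and the Tokens factories). The sample class and the line/column values are illustrative assumptions, not values taken from the committed test suite:

import { should } from 'chai';
import { Tokens, Token } from './utils/tokenizer';
import { TokenizerUtil } from './utils/tokenizerUtil';

should();

// Hypothetical sample: under the new field-declaration rule the modifier,
// the type, and the field name should each receive their own scope.
const input = `
public class Tester
{
    private int _count;
}`;

const tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5)); // via #storage-modifiers
tokens.should.contain(Tokens.Type("int", 4, 13));                      // storage.type.cs
tokens.should.contain(Tokens.FieldIdentifier("_count", 4, 17));        // entity.name.variable.cs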
18 changes: 9 additions & 9 deletions test/syntaxes/class.tests.ts
@@ -100,7 +100,7 @@ const input = `
namespace TestNamespace
{
class PublicClass<T> where T : ISomething { }
class PublicClass<T, X> : List<T>, ISomething where T : ICar, new() where X : struct { }
class PublicClass<T, X> : Dictionary<T, List<string>[]>, ISomething where T : ICar, new() where X : struct { }
}`;
let tokens: Token[] = TokenizerUtil.tokenize(input);

@@ -112,15 +112,15 @@ namespace TestNamespace

tokens.should.contain(Tokens.ClassKeyword("class", 5, 5));
tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 5, 11));
tokens.should.contain(Tokens.Type("List<T>", 5, 31));
tokens.should.contain(Tokens.Type("ISomething", 5, 40));
tokens.should.contain(Tokens.Keyword("where", 5, 51));
tokens.should.contain(Tokens.Type("T", 5, 57));
tokens.should.contain(Tokens.Type("ICar", 5, 61));
tokens.should.contain(Tokens.Keyword("new", 5, 67));
tokens.should.contain(Tokens.Type("Dictionary<T, List<string>[]>", 5, 31));
tokens.should.contain(Tokens.Type("ISomething", 5, 62));
tokens.should.contain(Tokens.Keyword("where", 5, 73));
tokens.should.contain(Tokens.Type("X", 5, 79));
tokens.should.contain(Tokens.Keyword("struct", 5, 83));
tokens.should.contain(Tokens.Type("T", 5, 79));
tokens.should.contain(Tokens.Type("ICar", 5, 83));
tokens.should.contain(Tokens.Keyword("new", 5, 89));
tokens.should.contain(Tokens.Keyword("where", 5, 95));
tokens.should.contain(Tokens.Type("X", 5, 101));
tokens.should.contain(Tokens.Keyword("struct", 5, 105));

});

114 changes: 114 additions & 0 deletions test/syntaxes/field.tests.ts
@@ -0,0 +1,114 @@
import { should } from 'chai';
import { Tokens, Token } from './utils/tokenizer';
import { TokenizerUtil } from './utils/tokenizerUtil';

describe("Grammar", function() {
before(function() {
should();
});

describe("Field", function() {
it("declaration", function() {

const input = `
public class Tester
{
private List _field;
private List field;
private List field123;
}`;

let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5));
tokens.should.contain(Tokens.Type("List", 4, 13));
tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 18));

tokens.should.contain(Tokens.FieldIdentifier("field", 5, 18));
tokens.should.contain(Tokens.FieldIdentifier("field123", 6, 18));
});


it("modifiers", function() {

const input = `
public class Tester
{
private static readonly List _field;
readonly string _field;
}`;

let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5));
tokens.should.contain(Tokens.StorageModifierKeyword("static", 4, 13));
tokens.should.contain(Tokens.StorageModifierKeyword("readonly", 4, 20));
tokens.should.contain(Tokens.Type("List", 4, 29));
tokens.should.contain(Tokens.FieldIdentifier("_field", 4, 34));
});

it("types", function() {

const input = `
public class Tester
{
string field123;
string[] field123;
}`;

let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.Type("string", 4, 5));
tokens.should.contain(Tokens.FieldIdentifier("field123", 4, 12));

tokens.should.contain(Tokens.Type("string[]", 5, 5));
tokens.should.contain(Tokens.FieldIdentifier("field123", 5, 14));
});

it("assignment", function() {

const input = `
public class Tester
{
private string field = "hello";
const bool field = true;
}`;

let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5));
tokens.should.contain(Tokens.Type("string", 4, 13));
tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20));
tokens.should.contain(Tokens.StringQuoted("hello", 4, 29));

tokens.should.contain(Tokens.StorageModifierKeyword("const", 5, 5));
tokens.should.contain(Tokens.Type("bool", 5, 13));
tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20));
tokens.should.contain(Tokens.LanguageConstant("true", 5, 28));
});

it("expression body", function() {

const input = `
public class Tester
{
private string field => "hello";
const bool field => true;
}`;

let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("private", 4, 5));
tokens.should.contain(Tokens.Type("string", 4, 13));
tokens.should.contain(Tokens.FieldIdentifier("field", 4, 20));
tokens.should.contain(Tokens.StringQuoted("hello", 4, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("const", 5, 5));
tokens.should.contain(Tokens.Type("bool", 5, 13));
tokens.should.contain(Tokens.FieldIdentifier("field", 5, 20));
tokens.should.contain(Tokens.LanguageConstant("true", 5, 29));
});
});
});


10 changes: 9 additions & 1 deletion test/syntaxes/utils/tokenizer.ts
@@ -78,5 +78,13 @@ export namespace Tokens {

export const Keyword = (text: string, line?: number, column?: number) =>
createToken(text, "keyword.other.cs", line, column);
}

export const FieldIdentifier = (text: string, line?: number, column?: number) =>
createToken(text, "entity.name.variable.cs", line, column);

export const StringQuoted = (text: string, line?: number, column?: number) =>
createToken(text, "string.quoted.double.cs", line, column);

export const LanguageConstant = (text: string, line?: number, column?: number) =>
createToken(text, "constant.language.cs", line, column);
}
