
Commit

Merge pull request #132 from JelleInfinity/pr
Pr
duckdoom4 authored May 31, 2022
2 parents 073f9af + a4d3529 commit dbc2048
Showing 3 changed files with 36 additions and 21 deletions.
11 changes: 8 additions & 3 deletions src/tokenizer/token-patterns.ts
@@ -1,4 +1,7 @@
/* eslint-disable no-useless-escape */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
/* eslint-disable no-useless-backreference */

// These patterns are converted from the tmLanguage file.
// Copy the patterns (the contents of the repository group) over and apply the following find and replace patterns:

@@ -29,6 +32,8 @@
// Result should be manually fixed
// Make sure to include this in internal captures to detect all newline tokens

const lineContinuationPattern = /^(?!$|#)(?=(?!\1) *[^ \t#]|\1[^ \t#])|\Z/gm;

const newLine: TokenPattern = {
token: CharacterTokenType.NewLine,
match: /\r\n|\r|\n/g,
@@ -201,7 +206,7 @@ const stringTags: TokenPattern = {
{
// Valid tags with numeric params (self-closing)
token: MetaTokenType.TagBlock,
match: /({)\s*(p|w)(=)(\+?)(\d+(?:.\d+)?)\s*(})/dg,
match: /({)\s*(p|w)(=)(\+)?(\d+(?:.\d+)?)\s*(})/dg,
captures: {
0: { token: MetaTokenType.TagBlock },
1: { token: CharacterTokenType.OpenBracket },
@@ -215,7 +220,7 @@
{
// Valid tags with numeric params (self-closing)
token: MetaTokenType.TagBlock,
match: /({)\s*(v?space)(=)(\+?)(\d+)\s*(})/dg,
match: /({)\s*(v?space)(=)(\+)?(\d+)\s*(})/dg,
captures: {
0: { token: MetaTokenType.TagBlock },
1: { token: CharacterTokenType.OpenBracket },
@@ -637,7 +642,7 @@ const pythonStatements: TokenPattern = {

const label: TokenPattern = {
token: MetaTokenType.Block,
match: /^[ \t]*(label)[ \t]+(.*)(:)/dgm,
match: /^[ \t]*(label)[ \t]+(.+)?(:)/dgm,
captures: {
0: { patterns: [whiteSpace] },
1: { token: KeywordTokenType.Label },
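A note on the regex changes above: moving the optional quantifier outside the capture group, as in (\+)? instead of (\+?), leaves the group undefined when no plus sign is present instead of matching an empty string; the same reasoning applies to the label pattern's (.+)?. Below is a minimal TypeScript sketch of that difference, using a simplified version of the tag pattern (hypothetical test snippet, not part of the repository):

// Minimal sketch: where the "?" sits changes what an absent "+" reports.
const oldStyle = /({)\s*(p|w)(=)(\+?)(\d+)\s*(})/d; // group 4 matches ""
const newStyle = /({)\s*(p|w)(=)(\+)?(\d+)\s*(})/d; // group 4 is undefined

const sample = "{p=5}";
console.log(oldStyle.exec(sample)?.[4]); // "" with a zero-length indices entry
console.log(newStyle.exec(sample)?.[4]); // undefined, no indices entry for group 4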
13 changes: 13 additions & 0 deletions src/tokenizer/tokenizer.ts
@@ -82,6 +82,7 @@ function setupAndValidatePatterns() {
const stack = new Stack<TokenizerTokenPattern>(32);
stack.push(basePatterns as ExTokenRepoPattern);

const mFlagRe = /[\^$]/g;
while (!stack.isEmpty()) {
const p = stack.pop()!;

@@ -94,6 +95,9 @@
for (let i = 0; i < p.patterns.length; ++i) stack.push(p.patterns[i]);
} else if (isRangePattern(p)) {
p._patternType = TokenPatternType.RangePattern;
if (p.begin.source.match(mFlagRe)) {
assert(p.begin.multiline, "To match this pattern the 'm' flag is required on the begin RegExp!");
}

assert(p.begin.global && p.end.global, "To match this pattern the 'g' flag is required on the begin and end RegExp!");
if (p.beginCaptures) {
@@ -125,13 +129,22 @@
}

let reEndSource = p.end.source;

if (reEndSource.match(mFlagRe)) {
assert(p.end.multiline, "To match this pattern the 'm' flag is required on the end RegExp!");
}

p._hasBackref = /\\\d+/.test(reEndSource);
//reEndSource = reEndSource.replaceAll("\\A", "¨0");
reEndSource = reEndSource.replaceAll("\\Z", "$(?!\r\n|\r|\n)"); // This assumes (CR)LF without a trailing new line, right?...
p.end = new RegExp(reEndSource, p.end.flags);
} else if (isMatchPattern(p)) {
p._patternType = TokenPatternType.MatchPattern;

if (p.match.source.match(mFlagRe)) {
assert(p.match.multiline, "To match this pattern the 'm' flag is required!");
}

assert(p.match.global, "To match this pattern the 'g' flag is required!");
if (p.captures) {
assert(p.match.hasIndices, "To match this pattern the 'd' flag is required!");
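The new mFlagRe checks added above guard against a subtle regex pitfall: a pattern whose source contains ^ or $ only anchors per line when the RegExp carries the 'm' flag; without it, the anchors bind to the start and end of the whole document string. A minimal sketch of the behaviour the assertions protect against (hypothetical patterns, not taken from the repository):

// Minimal sketch: why patterns using ^/$ need the 'm' flag here.
const doc = "label start:\n    pass\nlabel end:";

const withoutM = /^label .*$/g;  // anchors to the whole string
const withM = /^label .*$/gm;    // anchors to each line

console.log(doc.match(withoutM)); // null
console.log(doc.match(withM));    // ["label start:", "label end:"]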
33 changes: 15 additions & 18 deletions syntaxes/renpy.tmLanguage_2.json
@@ -46,7 +46,7 @@
},
"7": { "name": "punctuation.section.python.begin.renpy" }
},
"end": "(?:(?<!\\1|[^ \\t])|^)(?=(?:[ \\t]|^)(?!$|#)[^ \\t])",
"end": "^(?!$|#)(?=(?!\\1) *[^ \\t#]|\\1[^ \\t#])",
"patterns": [{ "include": "source.python" }]
},
{
@@ -80,33 +80,30 @@
"patterns": [
{
"name": "meta.label.renpy",
"begin": "(^[ \\t]+)?\\b(label)\\s+([a-zA-Z_.]\\w*(?:\\(.*\\))?)(?=\\s*)(:)",
"beginCaptures": {
"match": "^[ \\t]*(label)[ \t]+(.*)(:)",
"captures": {
"1": {
"name": "punctuation.whitespace.label.leading.renpy"
"name": "keyword.renpy"
},
"2": {
"name": "keyword.renpy storage.type.function.renpy"
},
"3": {
"name": "meta.label.renpy",
"name": "storage.type.function.renpy meta.embedded.line.python",
"patterns": [
{
"comment": "Function name",
"match": "([a-zA-Z_.]\\w*)",
"match": "[a-zA-Z_.]\\w*",
"name": "entity.name.function.renpy"
},
{ "include": "source.python#parameters" }
{
"match": "\\(.*\\)",
"captures": {
"0": {
"patterns": [{ "include": "source.python#parameters" }]
}
}
}
]
},
"4": {
"name": "punctuation.section.label.begin.renpy"
}
},

"end": "(:|(?=[#'\"\\n]))",
"endCaptures": {
"1": {
"3": {
"name": "punctuation.section.label.begin.renpy"
}
}
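For context, the reworked label rule in the grammar mirrors the token-patterns.ts change: the keyword, the name/parameter part, and the trailing colon are captured separately, and the middle capture is then re-scanned by the inner name and parameter patterns. A rough TypeScript sketch of how such a line splits up (hypothetical sample line, reusing the regex from the token-patterns.ts hunk above):

// Minimal sketch: how a label line is split by the reworked pattern.
const labelRe = /^[ \t]*(label)[ \t]+(.+)?(:)/dgm;
const m = labelRe.exec('label intro(name="Eileen"):');
console.log(m?.[1]); // "label"
console.log(m?.[2]); // 'intro(name="Eileen")'
console.log(m?.[3]); // ":"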
