diff --git a/src/compiler.js b/src/compiler.js
index 374c9504..a19dd34e 100644
--- a/src/compiler.js
+++ b/src/compiler.js
@@ -298,6 +298,9 @@ var Compiler = Object.extend({
             val = val.replace(/\t/g, '\\t');
             this.emit('"' + val + '"');
         }
+        else if (node.value === null) {
+            this.emit('null');
+        }
         else {
             this.emit(node.value.toString());
         }
diff --git a/src/lexer.js b/src/lexer.js
index 3ab45758..301c21ef 100644
--- a/src/lexer.js
+++ b/src/lexer.js
@@ -35,6 +35,7 @@ var TOKEN_PIPE = 'pipe';
 var TOKEN_INT = 'int';
 var TOKEN_FLOAT = 'float';
 var TOKEN_BOOLEAN = 'boolean';
+var TOKEN_NONE = 'none';
 var TOKEN_SYMBOL = 'symbol';
 var TOKEN_SPECIAL = 'special';
 var TOKEN_REGEX = 'regex';
@@ -195,6 +196,9 @@ Tokenizer.prototype.nextToken = function() {
         else if(tok.match(/^(true|false)$/)) {
             return token(TOKEN_BOOLEAN, tok, lineno, colno);
         }
+        else if(tok === 'none') {
+            return token(TOKEN_NONE, tok, lineno, colno);
+        }
         else if(tok) {
             return token(TOKEN_SYMBOL, tok, lineno, colno);
         }
@@ -493,6 +497,7 @@ module.exports = {
    TOKEN_INT: TOKEN_INT,
    TOKEN_FLOAT: TOKEN_FLOAT,
    TOKEN_BOOLEAN: TOKEN_BOOLEAN,
+   TOKEN_NONE: TOKEN_NONE,
    TOKEN_SYMBOL: TOKEN_SYMBOL,
    TOKEN_SPECIAL: TOKEN_SPECIAL,
    TOKEN_REGEX: TOKEN_REGEX
diff --git a/src/parser.js b/src/parser.js
index 5835f497..06e911d7 100644
--- a/src/parser.js
+++ b/src/parser.js
@@ -891,7 +891,7 @@
 
     parsePrimary: function (noPostfix) {
         var tok = this.nextToken();
-        var val = null;
+        var val;
         var node = null;
 
         if(!tok) {
@@ -919,11 +919,14 @@
                           tok.colno);
            }
        }
+        else if(tok.type === lexer.TOKEN_NONE) {
+            val = null;
+        }
        else if (tok.type === lexer.TOKEN_REGEX) {
            val = new RegExp(tok.value.body, tok.value.flags);
        }
 
-        if(val !== null) {
+        if(val !== undefined) {
            node = new nodes.Literal(tok.lineno, tok.colno, val);
        }
        else if(tok.type === lexer.TOKEN_SYMBOL) {
diff --git a/tests/compiler.js b/tests/compiler.js
index 9bf559d7..293ff9ad 100644
--- a/tests/compiler.js
+++ b/tests/compiler.js
@@ -67,6 +67,21 @@
            finish(done);
        });
 
+        it('should display none as empty string', function(done) {
+            equal('{{ none }}', '');
+            finish(done);
+        });
+
+        it('should compile none as falsy', function(done) {
+            equal('{% if not none %}yes{% endif %}', 'yes');
+            finish(done);
+        });
+
+        it('should compile none as null, not undefined', function(done) {
+            equal('{{ none|default("d", false) }}', '');
+            finish(done);
+        });
+
        it('should compile function calls', function(done) {
            equal('{{ foo("msg") }}',
                  { foo: function(str) { return str + 'hi'; }},
diff --git a/tests/lexer.js b/tests/lexer.js
index f84b1f90..9316b501 100644
--- a/tests/lexer.js
+++ b/tests/lexer.js
@@ -148,13 +148,14 @@
        });
 
        it('should parse basic types', function() {
-            tokens = lexer.lex('{{ 3 4.5 true false foo "hello" \'boo\' r/regex/ }}');
+            tokens = lexer.lex('{{ 3 4.5 true false none foo "hello" \'boo\' r/regex/ }}');
            hasTokens(tokens,
                      lexer.TOKEN_VARIABLE_START,
                      lexer.TOKEN_INT,
                      lexer.TOKEN_FLOAT,
                      lexer.TOKEN_BOOLEAN,
                      lexer.TOKEN_BOOLEAN,
+                      lexer.TOKEN_NONE,
                      lexer.TOKEN_SYMBOL,
                      lexer.TOKEN_STRING,
                      lexer.TOKEN_STRING,
diff --git a/tests/parser.js b/tests/parser.js
index d5e58cfa..e287b99a 100644
--- a/tests/parser.js
+++ b/tests/parser.js
@@ -156,6 +156,11 @@
               [nodes.Output,
                [nodes.Literal, false]]]);
 
+        isAST(parser.parse('{{ none }}'),
+              [nodes.Root,
+               [nodes.Output,
+                [nodes.Literal, null]]]);
+
        isAST(parser.parse('{{ foo }}'),
              [nodes.Root,
               [nodes.Output,
diff --git a/tests/runtime.js b/tests/runtime.js
index 2389f982..8a7e6d43 100644
--- a/tests/runtime.js
+++ b/tests/runtime.js
@@ -58,7 +58,7 @@
 
        it('should allow for undefined macro arguments in the last position', function(done) {
            render('{% macro foo(bar, baz) %}' +
                   '{{ bar }} {{ baz }}{% endmacro %}' +
-                   '{{ foo("hello", none) }}',
+                   '{{ foo("hello", nosuchvar) }}',
                   {}, { noThrow: true },
                   function(err, res) {