extending tokens...?
Jonak-Adipta-Kalita committed Apr 7, 2023
1 parent 79ee532 commit aff0591
Showing 4 changed files with 78 additions and 62 deletions.
7 changes: 6 additions & 1 deletion example.jonak
@@ -1,2 +1,7 @@
print("Hello World");
print((2 + 4 - 6 * 9) / 2);

var helloWorld = fn(hello, world) {
return hello + " " + world;
};

print(helloWorld("Hello!", "World!"));
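
As a side note, here is a minimal sketch of driving the lexer over this new example source. It assumes the lexer.New / NextToken API exercised in lexer_test.go below; the import paths are placeholders, not this repository's real module path.

package main

import (
    "fmt"

    "example.com/jonak/lexer" // placeholder import path
    "example.com/jonak/token" // placeholder import path
)

func main() {
    // The new example.jonak source, fed straight into the lexer.
    // Note: lexing of the string literals depends on parts of the lexer not shown in this diff.
    input := `var helloWorld = fn(hello, world) {
    return hello + " " + world;
};

print(helloWorld("Hello!", "World!"));`

    l := lexer.New(input)
    // Pull tokens until EOF and print each type/literal pair.
    for tok := l.NextToken(); tok.Type != token.EOF; tok = l.NextToken() {
        fmt.Printf("%-12v %q\n", tok.Type, tok.Literal)
    }
}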
34 changes: 19 additions & 15 deletions lexer/lexer.go
@@ -35,45 +35,49 @@ func (l *Lexer) NextToken() token.Token {

     switch l.ch {
     case '=':
-        tok = newToken(token.TT_ASSIGN, l.ch)
+        tok = newToken(token.ASSIGN, l.ch)
     case ';':
-        tok = newToken(token.TT_SEMICOLON, l.ch)
+        tok = newToken(token.SEMICOLON, l.ch)
     case '(':
-        tok = newToken(token.TT_LPAREN, l.ch)
+        tok = newToken(token.LPAREN, l.ch)
     case ')':
-        tok = newToken(token.TT_RPAREN, l.ch)
+        tok = newToken(token.RPAREN, l.ch)
     case ',':
-        tok = newToken(token.TT_COMMA, l.ch)
+        tok = newToken(token.COMMA, l.ch)
     case '+':
-        tok = newToken(token.TT_PLUS, l.ch)
+        tok = newToken(token.PLUS, l.ch)
     case '-':
-        tok = newToken(token.TT_MINUS, l.ch)
+        tok = newToken(token.MINUS, l.ch)
     case '!':
-        tok = newToken(token.TT_BANG, l.ch)
+        tok = newToken(token.BANG, l.ch)
     case '*':
-        tok = newToken(token.TT_ASTER, l.ch)
+        tok = newToken(token.ASTER, l.ch)
     case '/':
-        tok = newToken(token.TT_SLASH, l.ch)
+        tok = newToken(token.SLASH, l.ch)
+    case '<':
+        tok = newToken(token.LT, l.ch)
+    case '>':
+        tok = newToken(token.GT, l.ch)
     case '{':
-        tok = newToken(token.TT_LBRACE, l.ch)
+        tok = newToken(token.LBRACE, l.ch)
     case '}':
-        tok = newToken(token.TT_RBRACE, l.ch)
+        tok = newToken(token.RBRACE, l.ch)
     case 0:
         tok.Literal = ""
-        tok.Type = token.TT_EOF
+        tok.Type = token.EOF
     default:
         if isLetter(l.ch) {
             tok.Literal = l.readIdentifier()
             tok.Type = token.LookupIdent(tok.Literal)
 
             return tok
         } else if isDigit(l.ch) {
-            tok.Type = token.TT_INT
+            tok.Type = token.INT
             tok.Literal = l.readNumber()
 
             return tok
         } else {
-            tok = newToken(token.TT_ILLEGAL, l.ch)
+            tok = newToken(token.ILLEGAL, l.ch)
         }
     }

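The switch above leans on a few helpers that sit outside this hunk (newToken, isLetter, isDigit, readIdentifier, readNumber). Their actual bodies are not shown in the diff; a typical shape for the small ones, offered only as an assumption, looks like this (package lexer, using the token package changed later in this commit):

// Assumed shapes of helpers referenced above; the repository's real versions may differ.
func newToken(tokenType token.TokenType, ch byte) token.Token {
    return token.Token{Type: tokenType, Literal: string(ch)}
}

func isLetter(ch byte) bool {
    return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
}

func isDigit(ch byte) bool {
    return '0' <= ch && ch <= '9'
}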
38 changes: 21 additions & 17 deletions lexer/lexer_test.go
@@ -7,28 +7,32 @@ import (
 )
 
 func TestNextToken(t *testing.T) {
-    input := `print((2 + 4 - 6 * 9) / 2);`
+    input := `print(((2 + 4 - 6 * 9) / 2) < 2);`
 
     tests := []struct {
         expectedType    token.TokenType
         expectedLiteral string
     }{
-        {token.TT_PRINT, "print"},
-        {token.TT_LPAREN, "("},
-        {token.TT_LPAREN, "("},
-        {token.TT_INT, "2"},
-        {token.TT_PLUS, "+"},
-        {token.TT_INT, "4"},
-        {token.TT_MINUS, "-"},
-        {token.TT_INT, "6"},
-        {token.TT_ASTER, "*"},
-        {token.TT_INT, "9"},
-        {token.TT_RPAREN, ")"},
-        {token.TT_SLASH, "/"},
-        {token.TT_INT, "2"},
-        {token.TT_RPAREN, ")"},
-        {token.TT_SEMICOLON, ";"},
-        {token.TT_EOF, ""},
+        {token.PRINT, "print"},
+        {token.LPAREN, "("},
+        {token.LPAREN, "("},
+        {token.LPAREN, "("},
+        {token.INT, "2"},
+        {token.PLUS, "+"},
+        {token.INT, "4"},
+        {token.MINUS, "-"},
+        {token.INT, "6"},
+        {token.ASTER, "*"},
+        {token.INT, "9"},
+        {token.RPAREN, ")"},
+        {token.SLASH, "/"},
+        {token.INT, "2"},
+        {token.RPAREN, ")"},
+        {token.LT, "<"},
+        {token.INT, "2"},
+        {token.RPAREN, ")"},
+        {token.SEMICOLON, ";"},
+        {token.EOF, ""},
     }
 
     l := New(input)
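The remainder of TestNextToken is collapsed in this diff. A conventional continuation after l := New(input), offered only as an assumption about what the committed test does, would walk the table and compare each produced token:

// Assumed continuation of TestNextToken; the committed loop may differ in detail.
for i, tt := range tests {
    tok := l.NextToken()

    // Compare the token type against the expected entry in the table.
    if tok.Type != tt.expectedType {
        t.Fatalf("tests[%d] - wrong token type. expected=%q, got=%q", i, tt.expectedType, tok.Type)
    }

    // Compare the literal text as well.
    if tok.Literal != tt.expectedLiteral {
        t.Fatalf("tests[%d] - wrong literal. expected=%q, got=%q", i, tt.expectedLiteral, tok.Literal)
    }
}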
61 changes: 32 additions & 29 deletions token/token.go
@@ -7,42 +7,45 @@ type Token struct {
 }
 
 const (
-    TT_ILLEGAL = "ILLEGAL"
-    TT_EOF = "EOF"
-
-    TT_IDENTIFIER = "IDENTIFIER"
-    TT_INT = "INT"
-    TT_STRING = "STRING"
-    TT_BOOL = "BOOL"
-
-    TT_ASSIGN = "="
-    TT_PLUS = "+"
-    TT_MINUS = "-"
-    TT_BANG = "!"
-    TT_ASTER = "*"
-    TT_SLASH = "/"
-
-    TT_COMMA = ","
-    TT_SEMICOLON = ";"
-    TT_LPAREN = "("
-    TT_RPAREN = ")"
-    TT_LBRACE = "{"
-    TT_RBRACE = "}"
-
-    TT_FUNCTION = "FUNCTION"
-    TT_VAR = "VAR"
-    TT_PRINT = "PRINT"
+    ILLEGAL = "ILLEGAL"
+    EOF = "EOF"
+
+    IDENTIFIER = "IDENTIFIER"
+    INT = "INT"
+    STRING = "STRING"
+    BOOL = "BOOL"
+
+    ASSIGN = "="
+    PLUS = "+"
+    MINUS = "-"
+    BANG = "!"
+    ASTER = "*"
+    SLASH = "/"
+
+    LT = "<"
+    GT = ">"
+
+    COMMA = ","
+    SEMICOLON = ";"
+    LPAREN = "("
+    RPAREN = ")"
+    LBRACE = "{"
+    RBRACE = "}"
+
+    FUNCTION = "FUNCTION"
+    VAR = "VAR"
+    PRINT = "PRINT"
 )
 
 var keywords = map[string]TokenType{
-    "fn": TT_FUNCTION,
-    "var": TT_VAR,
-    "print": TT_PRINT,
+    "fn": FUNCTION,
+    "var": VAR,
+    "print": PRINT,
 }
 
 func LookupIdent(ident string) TokenType {
     if tok, ok := keywords[ident]; ok {
         return tok
     }
-    return TT_IDENTIFIER
+    return IDENTIFIER
 }
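
With the TT_ prefix gone, keyword lookup reads a little cleaner at call sites. A small usage sketch of LookupIdent as defined above (the import path is a placeholder, not this repository's real module path):

package main

import (
    "fmt"

    "example.com/jonak/token" // placeholder import path
)

func main() {
    // Keywords resolve to their dedicated token types...
    fmt.Println(token.LookupIdent("fn"))    // FUNCTION
    fmt.Println(token.LookupIdent("var"))   // VAR
    fmt.Println(token.LookupIdent("print")) // PRINT
    // ...anything else falls back to IDENTIFIER.
    fmt.Println(token.LookupIdent("helloWorld")) // IDENTIFIER
}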
