This repository has been archived by the owner on Jun 17, 2020. It is now read-only.

Commit

Added REPL
sanket143 committed May 6, 2020
1 parent 89e22cb commit 5a525b6
Showing 4 changed files with 58 additions and 42 deletions.
39 changes: 39 additions & 0 deletions src/cli/mod.rs
@@ -0,0 +1,39 @@
use std::io;
use std::io::Write;

use crate::types::TokenType;
use crate::lexer::def;

pub fn start() {
    const PROMPT: &'static str = ">> ";

    greet();

    loop {
        print!("{}", PROMPT);

        let _ = io::stdout().flush();

        let mut input = String::new();

        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read line");

        let mut lex = def::new_lexer(&input);
        loop {
            let tok = lex.next();
            match tok.ttype {
                TokenType::EOF => break,
                _ => {
                    println!("{:?}", tok);
                }
            }
        }
    }
}

fn greet() {
    println!("Hello there! This is the Hi Repl");
}
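
For readers without the rest of the crate checked out, here is a minimal, self-contained sketch of the lexer interface that start() leans on. Only Lexer, Token, TokenType, the ch field, and the new_lexer/next calls come from the diffs on this page; the other field names and the body of next are illustrative stand-ins, not the crate's actual src/lexer and src/types code.

// Illustrative stand-in for the lexer interface the REPL calls; the real
// definitions live in src/types/mod.rs and src/lexer/def.rs.
#[derive(Debug)]
pub enum TokenType { ILLEGAL, EOF, PLUS, MINUS }

#[derive(Debug)]
pub struct Token {
    pub ttype: TokenType,
    pub literal: String,
}

pub struct Lexer<'a> {
    input: &'a [u8],    // assumed field, not shown in the diff
    position: usize,    // assumed field, not shown in the diff
    pub ch: u8,
}

pub fn new_lexer(input: &str) -> Lexer<'_> {
    let mut lex = Lexer { input: input.as_bytes(), position: 0, ch: 0 };
    lex.read_char();
    lex
}

impl Lexer<'_> {
    // Advance one byte; 0 doubles as the end-of-input sentinel.
    fn read_char(&mut self) {
        self.ch = *self.input.get(self.position).unwrap_or(&0);
        self.position += 1;
    }

    // Hand back one token per call; the REPL loop breaks on EOF.
    pub fn next(&mut self) -> Token {
        let literal = if self.ch == 0 {
            String::new()
        } else {
            (self.ch as char).to_string()
        };
        let ttype = match self.ch {
            0 => TokenType::EOF,
            b'+' => TokenType::PLUS,
            b'-' => TokenType::MINUS,
            _ => TokenType::ILLEGAL,
        };
        self.read_char();
        Token { ttype, literal }
    }
}

With this stand-in, typing +- at the >> prompt prints one Token { .. } debug line per byte, and anything the sketch does not recognize comes back as ILLEGAL; the real lexer in the src/types/mod.rs diff below covers the full operator set plus identifiers.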
1 change: 1 addition & 0 deletions src/lib.rs
@@ -1,2 +1,3 @@
 pub mod lexer;
 pub mod types;
+pub mod cli;
4 changes: 2 additions & 2 deletions src/main.rs
@@ -1,5 +1,5 @@
-use hi::lexer;
+use hi::cli;

fn main() {
-    lexer::sanket();
+    cli::start();
}
56 changes: 16 additions & 40 deletions src/types/mod.rs
@@ -51,6 +51,7 @@ pub struct Lexer <'a> {
    pub ch: u8
}

+#[derive(Debug)]
pub struct Token {
    pub ttype: TokenType,
    pub literal: String
@@ -79,12 +80,8 @@ impl Lexer <'_> {
                    _ => new_token(TokenType::ASSIGN, tok)
                }
            },
-            b'+' => new_token(
-                TokenType::PLUS,
-                tok),
-            b'-' => new_token(
-                TokenType::MINUS,
-                tok),
+            b'+' => new_token(TokenType::PLUS, tok),
+            b'-' => new_token(TokenType::MINUS, tok),
            b'!' => {
                match self.peek() {
                    b'=' => {
@@ -94,39 +91,17 @@ impl Lexer <'_> {
                    _ => new_token(TokenType::BANG, tok)
                }
            },
-            b'/' => new_token(
-                TokenType::SLASH,
-                tok),
-            b'*' => new_token(
-                TokenType::ASTERISK,
-                tok),
-            b'<' => new_token(
-                TokenType::LT,
-                tok),
-            b'>' => new_token(
-                TokenType::GT,
-                tok),
-            b';' => new_token(
-                TokenType::SEMICOLON,
-                tok),
-            b'(' => new_token(
-                TokenType::LPAREN,
-                tok),
-            b')' => new_token(
-                TokenType::RPAREN,
-                tok),
-            b',' => new_token(
-                TokenType::COMMA,
-                tok),
-            b'{' => new_token(
-                TokenType::LBRACE,
-                tok),
-            b'}' => new_token(
-                TokenType::RBRACE,
-                tok),
-            0 => new_token(
-                TokenType::EOF,
-                String::from("")),
+            b'/' => new_token(TokenType::SLASH, tok),
+            b'*' => new_token(TokenType::ASTERISK, tok),
+            b'<' => new_token(TokenType::LT, tok),
+            b'>' => new_token(TokenType::GT, tok),
+            b';' => new_token(TokenType::SEMICOLON, tok),
+            b'(' => new_token(TokenType::LPAREN, tok),
+            b')' => new_token(TokenType::RPAREN, tok),
+            b',' => new_token(TokenType::COMMA, tok),
+            b'{' => new_token(TokenType::LBRACE, tok),
+            b'}' => new_token(TokenType::RBRACE, tok),
+            0 => new_token(TokenType::EOF, String::from("")),
            _ => {
                if helpers::is_letter(self.ch) {
                    let ident = self.read_identifier();
@@ -138,7 +113,8 @@ impl Lexer <'_> {

                }

-                return new_token(TokenType::ILLEGAL, self.ch.to_string());
+                self.read_char();
+                return new_token(TokenType::ILLEGAL, tok);
            }
        };
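
The last hunk changes two things in the fallback arm. First, the lexer now calls self.read_char() before returning: the old early return skipped whatever advance happens after the match, so next() could presumably keep reporting the same ILLEGAL byte forever, and the REPL loop above only breaks on EOF. Second, the literal switches from self.ch.to_string() to tok: on a u8, to_string() formats the decimal byte value rather than the character, whereas tok (judging by its use in the other arms) holds the character itself. A standalone check of that formatting difference, not code from the repository:

fn main() {
    let ch: u8 = b'@';
    // u8::to_string() yields the numeric byte value...
    assert_eq!(ch.to_string(), "64");
    // ...while going through char keeps the character itself.
    assert_eq!((ch as char).to_string(), "@");
    println!("old literal: {:?}, new literal: {:?}", ch.to_string(), (ch as char).to_string());
}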
