Skip to content
Merged
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,4 @@
# already existing elements were commented out

#/target
/tmp
5 changes: 2 additions & 3 deletions program.trv
Original file line number Diff line number Diff line change
Expand Up @@ -2,19 +2,18 @@

# Returns 1 when the argument is greater than 44, otherwise 0.
# (Fixed: the stale pre-diff line `if x > 44 is True then {` was left in
# alongside its replacement; only the current form is kept.)
func is_greater_than_44(params : Integer) -> Integer {
    let x : Integer = params;
    if x > 44 then {
        1
    }
    else {
        0
    }
}


func otherFunction() -> Boolean {
let number : Integer = 20;
let is_greater : Integer = is_greater_than_44(number);
if is_greater == 0 is True then {
if is_greater == 0 then {
print("Didnt work the first time!");
}
while is_greater == 0 do {
Expand Down
13 changes: 13 additions & 0 deletions simple.trv
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Entry point: exercises while, if/else, and return.
func main() -> Integer {
# num starts at 0, so the while condition (num < 10) holds on entry.
let num : Integer = 0;

# Body sets num to 11, so the loop exits after a single iteration.
while num < 10 do {
num = 11;
}
# NOTE(review): other programs in this repo write `if <cond> then {` —
# confirm the parser accepts an if without `then` as written here.
# Both branches assign the same value, so the if/else is a no-op either way.
if num > 10 {
num = 11;
} else {
num = 11;
}
return num;
}
97 changes: 52 additions & 45 deletions src/lexer/lexer.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use std::{any::Any, string};

use crate::lexer::token::{Token, TokenType};


Expand Down Expand Up @@ -42,50 +40,54 @@ impl Lexer {
tokens.push(self.assign_or_equals());
}
':' => {
tokens.push(self.simple_token(TokenType::Colon));
tokens.push(self.simple_token(ch.to_string(),TokenType::Colon));
}
'+' => {
tokens.push(self.simple_token(TokenType::Plus));
tokens.push(self.simple_token(ch.to_string(),TokenType::Plus));
}
'-' => {
tokens.push(self.minus_or_arrow());
}
'*' => {
tokens.push(self.simple_token(TokenType::Multiply));
tokens.push(self.simple_token(ch.to_string(),TokenType::Multiply));
}
'/' => {
tokens.push(self.simple_token(ch.to_string(),TokenType::Division));
}
'{' => {
tokens.push(self.simple_token(TokenType::LeftBrace));
tokens.push(self.simple_token(ch.to_string(),TokenType::LeftBrace));
}
'}' => {
tokens.push(self.simple_token(TokenType::RightBrace));
tokens.push(self.simple_token(ch.to_string(),TokenType::RightBrace));
}
'(' => {
tokens.push(self.simple_token(TokenType::LeftParen));
tokens.push(self.simple_token(ch.to_string(),TokenType::LeftParen));
}
')' => {
tokens.push(self.simple_token(TokenType::RightParen));
tokens.push(self.simple_token(ch.to_string(),TokenType::RightParen));
}
'>' =>{
tokens.push(self.simple_token(TokenType::GreaterThan));
tokens.push(self.simple_token(ch.to_string(),TokenType::GreaterThan));
}
'<' =>{
tokens.push(self.simple_token(TokenType::LessThan));
tokens.push(self.simple_token(ch.to_string(),TokenType::LessThan));
}
';' =>{
tokens.push(self.simple_token(TokenType::Semicolon));
tokens.push(self.simple_token(ch.to_string(),TokenType::Semicolon));
}
'"' => {
tokens.push(self.read_string_literal());
}
',' => {
tokens.push(self.simple_token(TokenType::Comma));
tokens.push(self.simple_token(ch.to_string(),TokenType::Comma));
}
'#' => {
self.read_comment();
}
_ => panic!("Suuuper wrongdog in here, unexpected char '{}' at {}:{}", ch, self.line, self.column),
}
}
tokens.push(self.simple_token("EOF".to_string(),TokenType::EOF));
tokens
}
fn current_char(&self) -> Option<char> {
Expand All @@ -103,28 +105,29 @@ impl Lexer {
self.column = 1;
}

fn simple_token(&mut self, token_type: TokenType) -> Token {
fn simple_token(&mut self,value: String, token_type: TokenType) -> Token {
let start_col_num = self.column;
self.advance();
Token::new(token_type, self.line, start_col_num)
Token::new(value, token_type, self.line, start_col_num)

}
/// Disambiguates `=` (Assign) from `==` (Equals) after a `'='` has been
/// seen, returning a token anchored at the first `=`'s column.
///
/// Fixes three defects:
/// * the `value` strings were swapped — Equals carried "=" and Assign
///   carried "==";
/// * the second `=` of `==` was never consumed, so it was re-lexed as a
///   spurious extra token;
/// * `current_char().unwrap()` panicked when `=` was the last character
///   of the input.
fn assign_or_equals(&mut self) -> Token {
    let original_col = self.column;
    self.advance();
    if self.current_char() == Some('=') {
        // Consume the second '=' so it is not lexed again.
        self.advance();
        Token::new("==".to_string(), TokenType::Equals, self.line, original_col)
    } else {
        Token::new("=".to_string(), TokenType::Assign, self.line, original_col)
    }
}
/// Disambiguates `-` (Minus) from `->` (Arrow) after a `'-'` has been
/// seen, returning a token anchored at the `-`'s column.
/// (Fixed: stale pre-diff `Token::new(TokenType::Arrow, ...)` line
/// removed; `current_char().unwrap()` no longer panics when `-` is the
/// final character of the input.)
fn minus_or_arrow(&mut self) -> Token {
    let original_col = self.column;
    self.advance();
    if self.current_char() == Some('>') {
        // Consume the '>' so the arrow is a single token.
        self.advance();
        Token::new("->".to_string(), TokenType::Arrow, self.line, original_col)
    } else {
        Token::new("-".to_string(), TokenType::Minus, self.line, original_col)
    }
}
fn read_comment(&mut self) {
Expand Down Expand Up @@ -156,7 +159,7 @@ impl Lexer {
}
}
let num = num_string.parse::<i64>().unwrap();
Token::new(TokenType::IntegerLiteral(num), self.line, start_col_num)
Token::new(num_string, TokenType::IntegerLiteral, self.line, start_col_num)
}

fn read_string_literal(&mut self) -> Token{
Expand All @@ -177,7 +180,7 @@ impl Lexer {
}
}
}
Token::new(TokenType::StringLiteral(the_litteral), self.line, start_col_num)
Token::new(the_litteral.clone(), TokenType::StringLiteral, self.line, start_col_num)
}

fn read_identifier(&mut self, first_ch: char) -> Token {
Expand All @@ -200,22 +203,22 @@ impl Lexer {
}
/// Maps an identifier lexeme to its keyword token type, falling back to
/// a generic `Identifier` token. The token's `value` always mirrors the
/// exact source lexeme.
/// (Fixed: the stale pre-diff match arms — which used the removed
/// payload-carrying variants and the dropped `Eof`/`True`/`False`
/// keywords — were left in the file alongside the new arms; and the
/// `return` arm stored the value "Return" instead of its lexeme.)
fn give_keyword_or_literal_token(&mut self, name: &str, line: usize, col: usize) -> Token {
    match name {
        "let" => Token::new("let".to_string(), TokenType::Let, line, col),
        "func" => Token::new("func".to_string(), TokenType::Func, line, col),
        "if" => Token::new("if".to_string(), TokenType::If, line, col),
        "then" => Token::new("then".to_string(), TokenType::Then, line, col),
        "else" => Token::new("else".to_string(), TokenType::Else, line, col),
        "not" => Token::new("not".to_string(), TokenType::Not, line, col),
        "while" => Token::new("while".to_string(), TokenType::While, line, col),
        "print" => Token::new("print".to_string(), TokenType::Print, line, col),
        "do" => Token::new("do".to_string(), TokenType::Do, line, col),
        "is" => Token::new("is".to_string(), TokenType::Is, line, col),
        "Integer" => Token::new("Integer".to_string(), TokenType::Integer, line, col),
        "Boolean" => Token::new("Boolean".to_string(), TokenType::Boolean, line, col),
        // BUG FIX: every other keyword stores its exact source text as
        // the value; this arm stored "Return" (capitalized).
        "return" => Token::new("return".to_string(), TokenType::Return, line, col),
        // `True`/`False` both lex as BooleanLiteral; the value string
        // distinguishes them downstream.
        "True" => Token::new("True".to_string(), TokenType::BooleanLiteral, line, col),
        "False" => Token::new("False".to_string(), TokenType::BooleanLiteral, line, col),
        _ => Token::new(name.to_string(), TokenType::Identifier, line, col),
    }
}
}
Expand Down Expand Up @@ -248,9 +251,10 @@ mod tests{
let actual_token_vec: Vec<Token> = lex.tokenize();

let expected: Vec<Token> = vec![
Token::new(TokenType::Identifier("abc_def".to_string()), 1, 1),
Token::new(TokenType::Assign, 1, 9),
Token::new(TokenType::IntegerLiteral(2), 1, 11),
Token::new("abc_def".to_string(), TokenType::Identifier, 1, 1),
Token::new("=".to_string(), TokenType::Assign, 1, 9),
Token::new(2.to_string(), TokenType::IntegerLiteral, 1, 11),
Token::new("EOF".to_string(), TokenType::EOF, 1, 12),
];

assert_eq!(actual_token_vec, expected);
Expand All @@ -259,14 +263,16 @@ mod tests{
// After lexing a comment-only line, the newline moves the lexer to
// line 2 column 1, and pushing the trailing EOF token advances the
// column once more to 2.
// (Fixed: the stale pre-diff assertion expecting (2,1) was left in the
// file alongside the updated one; only the current expectation is kept.)
fn reading_comments_tokenize_lexer_line_col_are_correct() {
    let mut lex: Lexer = Lexer::new("#abc_def = 2\n".to_string());
    lex.tokenize();
    assert_eq!((lex.line, lex.column), (2, 2));
}
#[test]
// A comment-only input produces no tokens except the trailing EOF token,
// positioned at the start of the line after the comment's newline.
// (Fixed: the stale pre-diff fn name and the old empty `expected` vector
// were left in the file alongside their replacements; only the current
// version is kept.)
fn reading_comments_tokenize_returns_eof_vector() {
    let mut lex: Lexer = Lexer::new("#abc_def = 2\n".to_string());
    let actual_token_vec: Vec<Token> = lex.tokenize();

    let expected: Vec<Token> = vec![
        Token::new("EOF".to_string(), TokenType::EOF, 2, 1)
    ];

    assert_eq!(actual_token_vec, expected);
}
Expand All @@ -276,9 +282,10 @@ mod tests{
let actual_token_vec: Vec<Token> = lex.tokenize();

let expected: Vec<Token> = vec![
Token::new(TokenType::StringLiteral("\"test\"".to_string()), 1, 1)
Token::new("\"test\"".to_string(), TokenType::StringLiteral, 1, 1),
Token::new("EOF".to_string(), TokenType::EOF, 1, 7)
];

assert_eq!(actual_token_vec, expected);
}
}
}
84 changes: 75 additions & 9 deletions src/lexer/token.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::fmt;

#[derive(Debug, Clone, PartialEq)]
pub enum TokenType {
// Keywords
Expand All @@ -17,13 +19,12 @@ pub enum TokenType {
Boolean,

// Literals
True,
False,
IntegerLiteral(i64),
StringLiteral(String),
BooleanLiteral,
IntegerLiteral,
StringLiteral,

// Identifiers
Identifier(String),
Identifier,

// Operators
Colon, // :
Expand All @@ -35,6 +36,7 @@ pub enum TokenType {
Plus, // +
Minus, // -
Multiply, // *
Division,

// Punctuation
LeftParen, // (
Expand All @@ -43,27 +45,91 @@ pub enum TokenType {
RightBrace, // }
Comma, // ,
Semicolon, // ;

// Special
Eof,
//special
Return,
EOF, // End of file
}

#[derive(Debug, Clone)]
pub struct Token {
pub value: String,
pub token_type: TokenType,
pub line: usize,
pub column: usize,
}

impl Token {
pub fn new(token_type: TokenType, line: usize, column: usize) -> Self {
pub fn new(value: String,token_type: TokenType, line: usize, column: usize) -> Self {
Token {
value:value,
token_type,
line,
column,
}
}
}
impl fmt::Display for TokenType {
    /// Renders each token type as its surface syntax (keywords and
    /// operators as their source text) or, for literals/identifiers and
    /// the special variants, as its descriptive name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let text = match self {
            // Keywords render as their source spelling.
            TokenType::Func => "func",
            TokenType::Let => "let",
            TokenType::If => "if",
            TokenType::Then => "then",
            TokenType::Else => "else",
            TokenType::Not => "not",
            TokenType::While => "while",
            TokenType::Print => "print",
            TokenType::Do => "do",
            TokenType::Is => "is",
            // Type names.
            TokenType::Integer => "Integer",
            TokenType::Boolean => "Boolean",
            // Literal kinds and identifiers render as category names.
            TokenType::BooleanLiteral => "BooleanLiteral",
            TokenType::IntegerLiteral => "IntegerLiteral",
            TokenType::StringLiteral => "StringLiteral",
            TokenType::Identifier => "Identifier",
            // Operators render as their symbol.
            TokenType::Colon => ":",
            TokenType::Arrow => "->",
            TokenType::Assign => "=",
            TokenType::GreaterThan => ">",
            TokenType::LessThan => "<",
            TokenType::Equals => "==",
            TokenType::Plus => "+",
            TokenType::Minus => "-",
            TokenType::Multiply => "*",
            TokenType::Division => "/",
            // Punctuation.
            TokenType::LeftParen => "(",
            TokenType::RightParen => ")",
            TokenType::LeftBrace => "{",
            TokenType::RightBrace => "}",
            TokenType::Comma => ",",
            TokenType::Semicolon => ";",
            // Special.
            TokenType::EOF => "EOF",
            TokenType::Return => "Return",
        };
        f.write_str(text)
    }
}

impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{} at {}:{}",
self.token_type, self.line, self.column
)
}
}
impl PartialEq for Token {
fn eq(&self, other: &Self) -> bool {
self.token_type == other.token_type &&
Expand Down
Loading