- The lexer can now peek forward into the input stream.

- Added the rest of the basic math operators.
- Added > and <.
- Test, Test, Test.
- Added if, else, return, true, false.
- Lexer can now differentiate between '=' and '==' and '!' and '!='.

git-svn-id: https://svn.tlawal.org/svn/monkey@2 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
This commit is contained in:
Tijani Lawal 2022-05-12 22:04:29 +00:00
parent 44d9b2c7ec
commit cebc052f32
3 changed files with 115 additions and 8 deletions

View File

@ -29,6 +29,14 @@ func (lexer *Lexer) read_char() {
lexer.read_position += 1 lexer.read_position += 1
} }
// peek_char returns the byte at read_position without consuming it,
// letting next_token look one character ahead (e.g. to distinguish
// '=' from "==" and '!' from "!="). It returns 0 (the NUL byte, used
// as the EOF sentinel) when the read position is past the end of input.
func (lexer *Lexer) peek_char() byte {
	// Early return keeps the happy path unindented (no else after return).
	if lexer.read_position >= len(lexer.input) {
		return 0
	}
	return lexer.input[lexer.read_position]
}
func (lexer *Lexer) read_identifier() string { func (lexer *Lexer) read_identifier() string {
position := lexer.position position := lexer.position
for is_letter(lexer.current_char) { for is_letter(lexer.current_char) {
@ -65,7 +73,23 @@ func (lexer *Lexer) next_token() token.Token {
switch lexer.current_char { switch lexer.current_char {
case '=': case '=':
if lexer.peek_char() == '=' {
ch := lexer.current_char
lexer.read_char()
literal := string(ch) + string(lexer.current_char)
tok = token.Token{Type: token.EQ, Literal: literal}
} else {
tok = new_token(token.ASSIGN, lexer.current_char) tok = new_token(token.ASSIGN, lexer.current_char)
}
case '!':
if lexer.peek_char() == '=' {
ch := lexer.current_char
lexer.read_char()
literal := string(ch) + string(lexer.current_char)
tok = token.Token{Type: token.NOT_EQ, Literal: literal}
} else {
tok = new_token(token.BANG, lexer.current_char)
}
case ';': case ';':
tok = new_token(token.SEMICOLON, lexer.current_char) tok = new_token(token.SEMICOLON, lexer.current_char)
case '(': case '(':
@ -80,6 +104,16 @@ func (lexer *Lexer) next_token() token.Token {
tok = new_token(token.COMMA, lexer.current_char) tok = new_token(token.COMMA, lexer.current_char)
case '+': case '+':
tok = new_token(token.PLUS, lexer.current_char) tok = new_token(token.PLUS, lexer.current_char)
case '-':
tok = new_token(token.MINUS, lexer.current_char)
case '/':
tok = new_token(token.SLASH, lexer.current_char)
case '*':
tok = new_token(token.ASTERISK, lexer.current_char)
case '<':
tok = new_token(token.LT, lexer.current_char)
case '>':
tok = new_token(token.GT, lexer.current_char)
case 0: case 0:
tok.Literal = "" tok.Literal = ""
tok.Type = token.EOF tok.Type = token.EOF

View File

@ -13,6 +13,18 @@ func TestNextToken(t *testing.T) {
x + y; x + y;
}; };
let result = add(five, ten); let result = add(five, ten);
!-/*5;
5 < 10 > 5;
if(5 < 10){
return true;
} else {
return false;
}
10 == 10;
10 != 9;
` `
tests := []struct { tests := []struct {
expectedType token.TokenType expectedType token.TokenType
@ -57,6 +69,48 @@ func TestNextToken(t *testing.T) {
{token.IDENT, "ten"}, {token.IDENT, "ten"},
{token.RPAREN, ")"}, {token.RPAREN, ")"},
{token.SEMICOLON, ";"}, {token.SEMICOLON, ";"},
{token.BANG, "!"},
{token.MINUS, "-"},
{token.SLASH, "/"},
{token.ASTERISK, "*"},
{token.INT, "5"},
{token.SEMICOLON, ";"},
{token.INT, "5"},
{token.LT, "<"},
{token.INT, "10"},
{token.GT, ">"},
{token.INT, "5"},
{token.SEMICOLON, ";"},
{token.IF, "if"},
{token.LPAREN, "("},
{token.INT, "5"},
{token.LT, "<"},
{token.INT, "10"},
{token.RPAREN, ")"},
{token.LBRACE, "{"},
{token.RETURN, "return"},
{token.TRUE, "true"},
{token.SEMICOLON, ";"},
{token.RBRACE, "}"},
{token.ELSE, "else"},
{token.LBRACE, "{"},
{token.RETURN, "return"},
{token.FALSE, "false"},
{token.SEMICOLON, ";"},
{token.RBRACE, "}"},
{token.INT, "10"},
{token.EQ, "=="},
{token.INT, "10"},
{token.SEMICOLON, ";"},
{token.INT, "10"},
{token.NOT_EQ, "!="},
{token.INT, "9"},
{token.SEMICOLON, ";"},
{token.EOF, ""}, {token.EOF, ""},
} }

View File

@ -18,6 +18,15 @@ const (
// Operators // Operators
ASSIGN = "=" ASSIGN = "="
PLUS = "+" PLUS = "+"
MINUS = "-"
BANG = "!"
ASTERISK = "*"
SLASH = "/"
EQ = "=="
NOT_EQ = "!="
LT = "<"
GT = ">"
// Delimiters // Delimiters
COMMA = "," COMMA = ","
@ -30,11 +39,21 @@ const (
// Keywords // Keywords
FUNCTION = "FUNCTION" FUNCTION = "FUNCTION"
LET = "LET" LET = "LET"
IF = "IF"
ELSE = "ELSE"
TRUE = "TRUE"
FALSE = "FALSE"
RETURN = "RETURN"
) )
var keywords = map[string]TokenType{ var keywords = map[string]TokenType{
"fn": FUNCTION, "fn": FUNCTION,
"let": LET, "let": LET,
"if": IF,
"else": ELSE,
"true": TRUE,
"false": FALSE,
"return": RETURN,
} }
func LookupIdentifier(ident string) TokenType { func LookupIdentifier(ident string) TokenType {