- The lexer can now peek forward into the input stream.

- Added the rest of the basic math operators.
- Added > and <.
- Test, Test, Test.
- Added if, else, return, true, false.
- Lexer can now differentiate between '=' and '==' and '!' and '!='.

git-svn-id: https://svn.tlawal.org/svn/monkey@2 f6afcba9-9ef1-4bdd-9b72-7484f5705bac
This commit is contained in:
Tijani Lawal 2022-05-12 22:04:29 +00:00
parent 44d9b2c7ec
commit cebc052f32
3 changed files with 115 additions and 8 deletions

View File

@ -29,6 +29,14 @@ func (lexer *Lexer) read_char() {
lexer.read_position += 1
}
// peek_char returns the byte at the lexer's read position without
// consuming it, letting next_token look one character ahead (e.g. to
// distinguish '=' from '==' and '!' from '!='). It returns 0 (NUL)
// once the end of the input has been reached.
func (lexer *Lexer) peek_char() byte {
	// Early return keeps the happy path unindented (idiomatic Go:
	// no else after a terminating return).
	if lexer.read_position >= len(lexer.input) {
		return 0
	}
	return lexer.input[lexer.read_position]
}
func (lexer *Lexer) read_identifier() string {
position := lexer.position
for is_letter(lexer.current_char) {
@ -49,13 +57,13 @@ func (lexer *Lexer) skip_whitespace() {
func (lexer *Lexer) read_number() string {
position := lexer.position
for is_digit(lexer.current_char){
for is_digit(lexer.current_char) {
lexer.read_char()
}
return lexer.input[position: lexer.position]
return lexer.input[position:lexer.position]
}
// is_digit reports whether ch is an ASCII decimal digit ('0'..'9').
// Only ASCII is considered; the lexer operates on raw bytes.
func is_digit(ch byte) bool {
	return '0' <= ch && ch <= '9'
}
@ -65,7 +73,23 @@ func (lexer *Lexer) next_token() token.Token {
switch lexer.current_char {
case '=':
tok = new_token(token.ASSIGN, lexer.current_char)
if lexer.peek_char() == '=' {
ch := lexer.current_char
lexer.read_char()
literal := string(ch) + string(lexer.current_char)
tok = token.Token{Type: token.EQ, Literal: literal}
} else {
tok = new_token(token.ASSIGN, lexer.current_char)
}
case '!':
if lexer.peek_char() == '=' {
ch := lexer.current_char
lexer.read_char()
literal := string(ch) + string(lexer.current_char)
tok = token.Token{Type: token.NOT_EQ, Literal: literal}
} else {
tok = new_token(token.BANG, lexer.current_char)
}
case ';':
tok = new_token(token.SEMICOLON, lexer.current_char)
case '(':
@ -80,6 +104,16 @@ func (lexer *Lexer) next_token() token.Token {
tok = new_token(token.COMMA, lexer.current_char)
case '+':
tok = new_token(token.PLUS, lexer.current_char)
case '-':
tok = new_token(token.MINUS, lexer.current_char)
case '/':
tok = new_token(token.SLASH, lexer.current_char)
case '*':
tok = new_token(token.ASTERISK, lexer.current_char)
case '<':
tok = new_token(token.LT, lexer.current_char)
case '>':
tok = new_token(token.GT, lexer.current_char)
case 0:
tok.Literal = ""
tok.Type = token.EOF

View File

@ -13,6 +13,18 @@ func TestNextToken(t *testing.T) {
x + y;
};
let result = add(five, ten);
!-/*5;
5 < 10 > 5;
if(5 < 10){
return true;
} else {
return false;
}
10 == 10;
10 != 9;
`
tests := []struct {
expectedType token.TokenType
@ -57,6 +69,48 @@ func TestNextToken(t *testing.T) {
{token.IDENT, "ten"},
{token.RPAREN, ")"},
{token.SEMICOLON, ";"},
{token.BANG, "!"},
{token.MINUS, "-"},
{token.SLASH, "/"},
{token.ASTERISK, "*"},
{token.INT, "5"},
{token.SEMICOLON, ";"},
{token.INT, "5"},
{token.LT, "<"},
{token.INT, "10"},
{token.GT, ">"},
{token.INT, "5"},
{token.SEMICOLON, ";"},
{token.IF, "if"},
{token.LPAREN, "("},
{token.INT, "5"},
{token.LT, "<"},
{token.INT, "10"},
{token.RPAREN, ")"},
{token.LBRACE, "{"},
{token.RETURN, "return"},
{token.TRUE, "true"},
{token.SEMICOLON, ";"},
{token.RBRACE, "}"},
{token.ELSE, "else"},
{token.LBRACE, "{"},
{token.RETURN, "return"},
{token.FALSE, "false"},
{token.SEMICOLON, ";"},
{token.RBRACE, "}"},
{token.INT, "10"},
{token.EQ, "=="},
{token.INT, "10"},
{token.SEMICOLON, ";"},
{token.INT, "10"},
{token.NOT_EQ, "!="},
{token.INT, "9"},
{token.SEMICOLON, ";"},
{token.EOF, ""},
}

View File

@ -16,8 +16,17 @@ const (
INT = "INT"
// Operators
ASSIGN = "="
PLUS = "+"
ASSIGN = "="
PLUS = "+"
MINUS = "-"
BANG = "!"
ASTERISK = "*"
SLASH = "/"
EQ = "=="
NOT_EQ = "!="
LT = "<"
GT = ">"
// Delimiters
COMMA = ","
@ -30,11 +39,21 @@ const (
// Keywords
FUNCTION = "FUNCTION"
LET = "LET"
IF = "IF"
ELSE = "ELSE"
TRUE = "TRUE"
FALSE = "FALSE"
RETURN = "RETURN"
)
// keywords maps Monkey's reserved words to their token types.
// LookupIdentifier consults this table so the lexer can distinguish
// keywords from user-defined identifiers.
//
// The diff render left both the pre- and post-change "fn"/"let"
// entries in place; duplicate constant keys in a map literal are a
// compile error in Go, so the table is deduplicated here.
var keywords = map[string]TokenType{
	"fn":     FUNCTION,
	"let":    LET,
	"if":     IF,
	"else":   ELSE,
	"true":   TRUE,
	"false":  FALSE,
	"return": RETURN,
}
func LookupIdentifier(ident string) TokenType {