diff --git a/parser/parser.go b/parser/parser.go
new file mode 100644
index 0000000..a5cce03
--- /dev/null
+++ b/parser/parser.go
@@ -0,0 +1,86 @@
+package parser
+
+import (
+	"monkey/ast"
+	"monkey/lexer"
+	"monkey/token"
+)
+
+// Parser consumes tokens from the lexer two at a time (current + peek)
+// and builds an ast.Program.
+type Parser struct {
+	lexer         *lexer.Lexer
+	current_token token.Token
+	peek_token    token.Token
+}
+
+// New returns a Parser primed so that both current_token and peek_token are set.
+func New(l_lexer *lexer.Lexer) *Parser {
+	l_parser := &Parser{lexer: l_lexer}
+
+	// Read two tokens so current_token and peek_token are both set.
+	l_parser.next_token()
+	l_parser.next_token()
+
+	return l_parser
+}
+
+// ParseProgram parses statements until EOF and collects them into a Program.
+func (l_parser *Parser) ParseProgram() *ast.Program {
+	program := &ast.Program{}
+	program.Statements = []ast.Statement{}
+
+	for l_parser.current_token.Type != token.EOF {
+		statement := l_parser.parse_statement()
+		if statement != nil {
+			program.Statements = append(program.Statements, statement)
+		}
+		l_parser.next_token()
+	}
+	return program
+}
+
+// next_token advances the token window: peek becomes current, lexer supplies a new peek.
+func (l_parser *Parser) next_token() {
+	l_parser.current_token = l_parser.peek_token
+	l_parser.peek_token = l_parser.lexer.NextToken()
+}
+
+// parse_statement dispatches on the current token type; returns nil for
+// statement kinds that are not implemented yet.
+func (l_parser *Parser) parse_statement() ast.Statement {
+	switch l_parser.current_token.Type {
+	case token.LET:
+		return l_parser.parse_let_statement()
+	default:
+		return nil
+	}
+}
+
+// parse_let_statement parses `let <ident> = <expr>;`, returning nil on a
+// malformed statement. The expression itself is not parsed yet.
+func (l_parser *Parser) parse_let_statement() *ast.LetStatement {
+	statement := &ast.LetStatement{Token: l_parser.current_token}
+	if !l_parser.expect_peek(token.IDENT) {
+		return nil
+	}
+
+	statement.Name = &ast.Identifier{Token: l_parser.current_token, Value: l_parser.current_token.Literal}
+	if !l_parser.expect_peek(token.ASSIGN) {
+		return nil
+	}
+
+	// TODO(tijani): Skipping the expressions until there is a semicolon.
+	// Also stop on EOF so a missing semicolon cannot loop forever.
+	for !l_parser.current_token_is(token.SEMICOLON) && !l_parser.current_token_is(token.EOF) {
+		l_parser.next_token()
+	}
+	return statement
+}
+
+// current_token_is reports whether the current token has the given type.
+func (l_parser *Parser) current_token_is(l_token token.TokenType) bool {
+	return l_parser.current_token.Type == l_token
+}
+
+// peek_token_is reports whether the peek token has the given type.
+func (l_parser *Parser) peek_token_is(l_token token.TokenType) bool {
+	return l_parser.peek_token.Type == l_token
+}
+
+// expect_peek advances only when the peek token matches the expected type.
+func (l_parser *Parser) expect_peek(l_token token.TokenType) bool {
+	if l_parser.peek_token_is(l_token) {
+		l_parser.next_token()
+		return true
+	} else {
+		return false
+	}
+}
diff --git a/parser/parser_test.go b/parser/parser_test.go
new file mode 100644
index 0000000..aa7fd03
--- /dev/null
+++ b/parser/parser_test.go
@@ -0,0 +1,67 @@
+package parser
+
+import (
+	"monkey/ast"
+	"monkey/lexer"
+	"testing"
+)
+
+func TestLetStatement(l_test *testing.T) {
+	input := `
+	let x = 4;
+	let y = 19;
+	let foobar = 8948398493;
+	`
+
+	l_lexer := lexer.New(input)
+	l_parser := New(l_lexer)
+
+	program := l_parser.ParseProgram()
+	if program == nil {
+		l_test.Fatalf("ParseProgram() returned nil")
+	}
+
+	if len(program.Statements) != 3 {
+		l_test.Fatalf("program.Statements does not contain 3 statements, got=%d", len(program.Statements))
+	}
+
+	tests := []struct {
+		expected_identifier string
+	}{
+		{"x"},
+		{"y"},
+		{"foobar"},
+	}
+
+	for i, tt := range tests {
+		statement := program.Statements[i]
+		if !testLetStatement(l_test, statement, tt.expected_identifier) {
+			return
+		}
+	}
+}
+
+// testLetStatement checks that statement is a *ast.LetStatement binding `name`.
+func testLetStatement(l_test *testing.T, statement ast.Statement, name string) bool {
+	if statement.TokenLiteral() != "let" {
+		l_test.Errorf("statement.TokenLiteral not let, got=%q", statement.TokenLiteral())
+		return false
+	}
+
+	let_statement, ok := statement.(*ast.LetStatement)
+	if !ok {
+		l_test.Errorf("statement not *ast.LetStatement, got=%T", statement)
+		return false
+	}
+
+	if let_statement.Name.Value != name {
+		l_test.Errorf("let_statement.name.Value not %s, got=%s", name, let_statement.Name.Value)
+		return false
+	}
+
+	if let_statement.Name.TokenLiteral() != name {
+		l_test.Errorf("let_statement.name.TokenLiteral() not %s, got=%s", name, let_statement.Name.TokenLiteral())
+		return false
+
+	}
+	return true
+}